blob: ec54376dcac98d9e5fb2e80ac6051bfb3b7a1517 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000019#include "art_method-inl.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010020#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010021#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000022#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010023#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010024#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010025#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070026#include "heap_poisoning.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080027#include "intrinsics.h"
28#include "intrinsics_x86_64.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000029#include "jit/profiling_info.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010030#include "linker/linker_patch.h"
Andreas Gamped4901292017-05-30 18:41:34 -070031#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070032#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070033#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010034#include "mirror/object_reference.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000035#include "scoped_thread_state_change-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010036#include "thread.h"
37#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010038#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010039#include "utils/x86_64/assembler_x86_64.h"
40#include "utils/x86_64/managed_register_x86_64.h"
41
Vladimir Marko0a516052019-10-14 13:00:44 +000042namespace art {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010043
Roland Levillain0d5a2812015-11-13 10:07:31 +000044template<class MirrorType>
45class GcRoot;
46
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010047namespace x86_64 {
48
// Frame offset at which the current method is stored (bottom of the frame).
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry (first integer argument register).
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Callee-saved core and floating-point registers in the x86-64 ART ABI.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Mask for the C2 condition flag (bit 10) of the x87 FPU status word.
static constexpr int kC2ConditionMask = 0x400;
60
Vladimir Marko3232dbb2018-07-25 15:42:46 +010061static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
62 // Custom calling convention: RAX serves as both input and output.
63 RegisterSet caller_saves = RegisterSet::Empty();
64 caller_saves.Add(Location::RegisterLocation(RAX));
65 return caller_saves;
66}
67
Roland Levillain7cbd27f2016-08-11 23:53:33 +010068// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
69#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070070#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010071
Andreas Gampe85b62f22015-09-09 13:15:38 -070072class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010073 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000074 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010075
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010076 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +000077 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010078 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000079 if (instruction_->CanThrowIntoCatchBlock()) {
80 // Live registers will be restored in the catch block if caught.
81 SaveLiveRegisters(codegen, instruction_->GetLocations());
82 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010083 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000084 instruction_,
85 instruction_->GetDexPc(),
86 this);
Roland Levillain888d0672015-11-23 18:53:50 +000087 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010088 }
89
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010090 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +010091
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010092 const char* GetDescription() const override { return "NullCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +010093
Nicolas Geoffraye5038322014-07-04 09:41:32 +010094 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010095 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
96};
97
Andreas Gampe85b62f22015-09-09 13:15:38 -070098class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000099 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000100 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000101
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100102 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000103 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +0000104 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100105 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000106 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +0000107 }
108
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100109 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +0100110
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100111 const char* GetDescription() const override { return "DivZeroCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100112
Calin Juravled0d48522014-11-04 16:40:20 +0000113 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000114 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
115};
116
Andreas Gampe85b62f22015-09-09 13:15:38 -0700117class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000118 public:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100119 DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, DataType::Type type, bool is_div)
David Srbecky9cd6d372016-02-09 15:24:47 +0000120 : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000121
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100122 void EmitNativeCode(CodeGenerator* codegen) override {
Calin Juravled0d48522014-11-04 16:40:20 +0000123 __ Bind(GetEntryLabel());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100124 if (type_ == DataType::Type::kInt32) {
Calin Juravlebacfec32014-11-14 15:54:36 +0000125 if (is_div_) {
126 __ negl(cpu_reg_);
127 } else {
Mark Mendellcfa410b2015-05-25 16:02:44 -0400128 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000129 }
130
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000131 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100132 DCHECK_EQ(DataType::Type::kInt64, type_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000133 if (is_div_) {
134 __ negq(cpu_reg_);
135 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -0400136 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000137 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000138 }
Calin Juravled0d48522014-11-04 16:40:20 +0000139 __ jmp(GetExitLabel());
140 }
141
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100142 const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100143
Calin Juravled0d48522014-11-04 16:40:20 +0000144 private:
Calin Juravlebacfec32014-11-14 15:54:36 +0000145 const CpuRegister cpu_reg_;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100146 const DataType::Type type_;
Calin Juravlebacfec32014-11-14 15:54:36 +0000147 const bool is_div_;
148 DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
Calin Juravled0d48522014-11-04 16:40:20 +0000149};
150
Andreas Gampe85b62f22015-09-09 13:15:38 -0700151class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000152 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100153 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000154 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000155
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100156 void EmitNativeCode(CodeGenerator* codegen) override {
Aart Bikb13c65b2017-03-21 20:14:07 -0700157 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000158 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000159 __ Bind(GetEntryLabel());
Aart Bik24b905f2017-04-06 09:59:06 -0700160 SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD.
Serban Constantinescuba45db02016-07-12 22:53:02 +0100161 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000162 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Aart Bik24b905f2017-04-06 09:59:06 -0700163 RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD.
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100164 if (successor_ == nullptr) {
165 __ jmp(GetReturnLabel());
166 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000167 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100168 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000169 }
170
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100171 Label* GetReturnLabel() {
172 DCHECK(successor_ == nullptr);
173 return &return_label_;
174 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000175
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100176 HBasicBlock* GetSuccessor() const {
177 return successor_;
178 }
179
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100180 const char* GetDescription() const override { return "SuspendCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100181
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000182 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100183 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000184 Label return_label_;
185
186 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
187};
188
// Slow path for HBoundsCheck: marshals the offending index and the array (or
// string) length into the runtime calling convention registers and throws
// ArrayIndexOutOfBoundsException / StringIndexOutOfBoundsException. Fatal —
// the throw entrypoints do not return.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory? If the ArrayLength was folded
    // into this use site, it has no location of its own and must be reloaded
    // here from the array object.
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        // With string compression enabled the count field appears to carry the
        // length shifted left by one (low bit = compression flag), so strip
        // the flag bit here — TODO(review): confirm against mirror::String.
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    // String.charAt has its own entrypoint so the thrown exception type matches.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
248
// Slow path shared by HLoadClass and HClinitCheck: resolves the type and/or
// runs static initialization through the runtime, then moves the resulting
// class reference into the instruction's output location (if any).
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  // `cls` is the class to load; `at` is the instruction this slow path
  // belongs to (either the HLoadClass itself or an HClinitCheck).
  LoadClassSlowPathX86_64(HLoadClass* cls, HInstruction* at)
      : SlowPathCode(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    // Resolution is needed when the HLoadClass could not be handled by the
    // fast path; initialization when this is (or guards) a clinit check.
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Custom calling convention: RAX serves as both input and output.
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ movl(CpuRegister(RAX), Immediate(type_index.index_));
      x86_64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      // The class is already resolved; move it into RAX for the clinit call.
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      x86_64_codegen->Move(Location::RegisterLocation(RAX), source);
    }
    if (must_do_clinit) {
      x86_64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
304
Vladimir Markoaad75c62016-10-03 08:46:48 +0000305class LoadStringSlowPathX86_64 : public SlowPathCode {
306 public:
307 explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}
308
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100309 void EmitNativeCode(CodeGenerator* codegen) override {
Vladimir Markoaad75c62016-10-03 08:46:48 +0000310 LocationSummary* locations = instruction_->GetLocations();
311 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
312
313 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
314 __ Bind(GetEntryLabel());
315 SaveLiveRegisters(codegen, locations);
316
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000317 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Vladimir Marko94ce9c22016-09-30 14:50:51 +0100318 // Custom calling convention: RAX serves as both input and output.
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000319 __ movl(CpuRegister(RAX), Immediate(string_index.index_));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000320 x86_64_codegen->InvokeRuntime(kQuickResolveString,
321 instruction_,
322 instruction_->GetDexPc(),
323 this);
324 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
325 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
326 RestoreLiveRegisters(codegen, locations);
327
Vladimir Markoaad75c62016-10-03 08:46:48 +0000328 __ jmp(GetExitLabel());
329 }
330
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100331 const char* GetDescription() const override { return "LoadStringSlowPathX86_64"; }
Vladimir Markoaad75c62016-10-03 08:46:48 +0000332
333 private:
334 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
335};
336
// Slow path shared by HInstanceOf and HCheckCast: calls the corresponding
// runtime entrypoint with the object and class arguments. For instance-of the
// runtime result (in RAX) is moved to the output; for a fatal check-cast the
// entrypoint throws and control never returns here.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  // `is_fatal` is true when a failed check-cast cannot be caught locally,
  // in which case register save/restore and the exit jump are omitted.
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
    }

    // A fatal path needs no save unless the exception can be caught locally.
    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
399
Andreas Gampe85b62f22015-09-09 13:15:38 -0700400class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700401 public:
Aart Bik42249c32016-01-07 15:33:50 -0800402 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000403 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700404
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100405 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000406 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700407 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100408 LocationSummary* locations = instruction_->GetLocations();
409 SaveLiveRegisters(codegen, locations);
410 InvokeRuntimeCallingConvention calling_convention;
411 x86_64_codegen->Load32BitValue(
412 CpuRegister(calling_convention.GetRegisterAt(0)),
413 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100414 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100415 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700416 }
417
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100418 const char* GetDescription() const override { return "DeoptimizationSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100419
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700420 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700421 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
422};
423
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100424class ArraySetSlowPathX86_64 : public SlowPathCode {
425 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000426 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100427
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100428 void EmitNativeCode(CodeGenerator* codegen) override {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100429 LocationSummary* locations = instruction_->GetLocations();
430 __ Bind(GetEntryLabel());
431 SaveLiveRegisters(codegen, locations);
432
433 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100434 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100435 parallel_move.AddMove(
436 locations->InAt(0),
437 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100438 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100439 nullptr);
440 parallel_move.AddMove(
441 locations->InAt(1),
442 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100443 DataType::Type::kInt32,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100444 nullptr);
445 parallel_move.AddMove(
446 locations->InAt(2),
447 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100448 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100449 nullptr);
450 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
451
Roland Levillain0d5a2812015-11-13 10:07:31 +0000452 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100453 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000454 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100455 RestoreLiveRegisters(codegen, locations);
456 __ jmp(GetExitLabel());
457 }
458
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100459 const char* GetDescription() const override { return "ArraySetSlowPathX86_64"; }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100460
461 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100462 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
463};
464
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` must be a register location; `unpoison_ref_before_marking`
  // requests an unpoison of the loaded reference before calling the
  // mark entrypoint (used when the reference was loaded raw from the heap).
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only the instruction kinds below are expected to use this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI, output in RAX):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
548
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `field_addr` must be an address based on `obj`; `temp1`/`temp2` are
  // scratch registers used for the old reference and for saving RAX.
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI, output in RAX):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it is overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch register holding the old reference across the mark call.
  const CpuRegister temp1_;
  // Scratch register used to preserve RAX around the CAS.
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
720
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the to-space reference; `ref` is the reference to process;
  // `obj` holds the field; `offset`/`index` describe where the reference was
  // loaded from (`index` is only valid for array gets and Unsafe intrinsics).
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    // Only the instruction kinds below are expected to use this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the real offset (offset_ + index * 4) and store it in `index`.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // No index: pass the constant offset directly as the third argument.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  // Returns a core caller-save register different from `ref_` and `obj_`.
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
902
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `root` is the GC root to process; `out` receives the result.
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // Only class and string loads are expected to use this slow path.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root in the first runtime argument register and call the
    // kQuickReadBarrierForRootSlow entrypoint; the result comes back in RAX.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // The location receiving the processed root.
  const Location out_;
  // The location of the GC root.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
944
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100945#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100946// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
947#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100948
Roland Levillain4fa13f62015-07-06 18:11:54 +0100949inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700950 switch (cond) {
951 case kCondEQ: return kEqual;
952 case kCondNE: return kNotEqual;
953 case kCondLT: return kLess;
954 case kCondLE: return kLessEqual;
955 case kCondGT: return kGreater;
956 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700957 case kCondB: return kBelow;
958 case kCondBE: return kBelowEqual;
959 case kCondA: return kAbove;
960 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700961 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100962 LOG(FATAL) << "Unreachable";
963 UNREACHABLE();
964}
965
Aart Bike9f37602015-10-09 11:15:55 -0700966// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100967inline Condition X86_64FPCondition(IfCondition cond) {
968 switch (cond) {
969 case kCondEQ: return kEqual;
970 case kCondNE: return kNotEqual;
971 case kCondLT: return kBelow;
972 case kCondLE: return kBelowEqual;
973 case kCondGT: return kAbove;
974 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700975 default: break; // should not happen
Igor Murashkin2ffb7032017-11-08 13:35:21 -0800976 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100977 LOG(FATAL) << "Unreachable";
978 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700979}
980
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    ArtMethod* method ATTRIBUTE_UNUSED) {
  // On x86-64 every dispatch kind requested by the caller is supported,
  // so the desired dispatch info is returned unchanged.
  return desired_dispatch_info;
}
986
// Emits code for a static or direct invoke: first materializes the target
// method (or entrypoint) according to the method load kind — usually into
// `temp` — then emits the call according to the code pointer location.
// `slow_path` is forwarded to RecordPcInfo for stack map association.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip= */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already available in an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
      // The dummy offset is fixed up by the linker via the recorded patch.
      __ leal(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageMethodPatch(invoke);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ movl(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageRelRoPatch(GetBootImageOffset(invoke));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordMethodBssEntryPatch(invoke);
      // No need for memory fence, thanks to the x86-64 memory model.
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      // JIT: the method address is known at compile time.
      Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: jump straight to this method's frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
1047
// Emits code for a virtual invoke: loads the receiver's class, looks up the
// target method in the embedded vtable, and calls its quick entrypoint.
// `temp_in` is a scratch register; `slow_path` is forwarded to RecordPcInfo.
void CodeGeneratorX86_64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  // This load doubles as the implicit null check on the receiver, so it must
  // stay immediately before MaybeRecordImplicitNullCheck.
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  MaybeGenerateInlineCacheCheck(invoke, temp);

  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
1083
Vladimir Marko6fd16062018-06-26 11:02:04 +01001084void CodeGeneratorX86_64::RecordBootImageIntrinsicPatch(uint32_t intrinsic_data) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001085 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, intrinsic_data);
1086 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Marko6fd16062018-06-26 11:02:04 +01001087}
1088
Vladimir Markob066d432018-01-03 13:14:37 +00001089void CodeGeneratorX86_64::RecordBootImageRelRoPatch(uint32_t boot_image_offset) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001090 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, boot_image_offset);
1091 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Markob066d432018-01-03 13:14:37 +00001092}
1093
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001094void CodeGeneratorX86_64::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
1095 boot_image_method_patches_.emplace_back(
1096 invoke->GetTargetMethod().dex_file, invoke->GetTargetMethod().index);
Vladimir Marko65979462017-05-19 17:25:12 +01001097 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001098}
1099
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001100void CodeGeneratorX86_64::RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke) {
1101 method_bss_entry_patches_.emplace_back(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
1102 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001103}
1104
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001105void CodeGeneratorX86_64::RecordBootImageTypePatch(HLoadClass* load_class) {
1106 boot_image_type_patches_.emplace_back(
1107 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001108 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001109}
1110
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001111Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001112 type_bss_entry_patches_.emplace_back(
1113 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001114 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001115}
1116
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001117void CodeGeneratorX86_64::RecordBootImageStringPatch(HLoadString* load_string) {
1118 boot_image_string_patches_.emplace_back(
1119 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
1120 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01001121}
1122
Vladimir Markoaad75c62016-10-03 08:46:48 +00001123Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001124 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001125 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001126 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00001127}
1128
// Loads the address identified by `boot_image_reference` into `reg`.
// The addressing strategy depends on the compilation mode:
//  - boot image compile: PC-relative LEA with a link-time intrinsic patch;
//  - PIC (AOT app) compile: load through .data.bimg.rel.ro with a rel.ro patch;
//  - otherwise (JIT): the boot image is already mapped, so embed the absolute
//    address as an immediate.
void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    // The dummy 32-bit offset is fixed up later via RecordBootImageIntrinsicPatch().
    __ leal(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
    RecordBootImageIntrinsicPatch(boot_image_reference);
  } else if (GetCompilerOptions().GetCompilePic()) {
    // Load the value from the patched .data.bimg.rel.ro slot.
    __ movl(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
    RecordBootImageRelRoPatch(boot_image_reference);
  } else {
    DCHECK(GetCompilerOptions().IsJitCompiler());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    // NOTE(review): only the first boot image space is consulted — presumably
    // `boot_image_reference` is an offset within the contiguous boot image; confirm.
    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
    __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
  }
}
1144
// Allocates an object instance on behalf of an intrinsic by loading the target
// method's declaring class into the first runtime-call argument register and
// invoking the AllocObjectInitialized entrypoint.
void CodeGeneratorX86_64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
                                                       uint32_t boot_image_offset) {
  DCHECK(invoke->IsStatic());
  InvokeRuntimeCallingConvention calling_convention;
  CpuRegister argument = CpuRegister(calling_convention.GetRegisterAt(0));
  if (GetCompilerOptions().IsBootImage()) {
    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
    __ leal(argument,
            Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
    // Record a boot-image type patch for the target method's declaring class.
    MethodReference target_method = invoke->GetTargetMethod();
    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
    boot_image_type_patches_.emplace_back(target_method.dex_file, type_idx.index_);
    __ Bind(&boot_image_type_patches_.back().label);
  } else {
    LoadBootImageAddress(argument, boot_image_offset);
  }
  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
  // NOTE(review): the entrypoint invoked above is AllocObjectInitialized but the
  // signature check below names AllocObjectWithChecks — presumably both share the
  // (void*, mirror::Class*) signature; confirm the mismatch is intentional.
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}
1165
// The label points to the end of the "movl" or another instruction but the literal offset
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1169
// Converts every recorded PatchInfo in `infos` into a linker patch built by
// `Factory` and appends it to `linker_patches`. The literal offset is the label
// position minus the 4-byte embedded-constant adjustment; the label position
// itself is passed as the PC-relative anchor.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PatchInfo<Label>>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PatchInfo<Label>& info : infos) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        Factory(literal_offset, info.target_dex_file, info.label.Position(), info.offset_or_index));
  }
}
1180
// Adapts a 3-argument linker-patch factory (no dex file) to the 4-argument
// shape expected by EmitPcRelativeLinkerPatches. Used for intrinsic-reference
// and .data.bimg.rel.ro patches, which are recorded with a null dex file.
template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}
1189
// Flushes all patch records accumulated during code generation into
// `linker_patches`. The total is pre-computed so the final DCHECK_EQ can
// verify that every recorded patch was emitted exactly once.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_other_patches_.size();
  linker_patches->reserve(size);
  // Direct boot-image references are only valid when producing (an extension
  // of) the boot image; otherwise those deques must be empty.
  if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
  } else {
    DCHECK(boot_image_method_patches_.empty());
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
  }
  // The "other" deque is interpreted as intrinsic references when compiling the
  // boot image, and as .data.bimg.rel.ro entries otherwise.
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_other_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_other_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1228
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001229void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001230 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001231}
1232
1233void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001234 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001235}
1236
// Returns the x86-64 ISA features selected for this compilation, downcast from
// the generic features held by the compiler options.
const X86_64InstructionSetFeatures& CodeGeneratorX86_64::GetInstructionSetFeatures() const {
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86_64InstructionSetFeatures();
}
1240
// Spills core register `reg_id` to the stack slot at `stack_index` (relative
// to RSP) and returns the number of bytes used (one 64-bit word).
size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}
1245
// Reloads core register `reg_id` from the stack slot at `stack_index`
// (relative to RSP) and returns the number of bytes read (one 64-bit word).
size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}
1250
// Spills XMM register `reg_id` to the stack slot at `stack_index`. When the
// graph uses SIMD, the full 128-bit register is saved (movups); otherwise only
// the scalar 64-bit lane (movsd). Returns the slow-path FP slot width.
size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  } else {
    __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  }
  return GetSlowPathFPWidth();
}
1259
// Reloads XMM register `reg_id` from the stack slot at `stack_index`,
// mirroring SaveFloatingPointRegister: full 128 bits with SIMD, scalar 64 bits
// otherwise. Returns the slow-path FP slot width.
size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  } else {
    __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  }
  return GetSlowPathFPWidth();
}
1268
// Emits a call to the quick runtime `entrypoint` and, when the entrypoint
// requires one, records a stack map at `dex_pc` for `instruction`.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1279
// Emits a runtime call given a raw thread-local entrypoint offset, without
// recording a stack map. Only valid for entrypoints validated not to need one.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1286
// Calls through the gs-segment-relative entrypoint table (gs holds the Thread
// pointer on x86-64), i.e. an indirect call to Thread::Current()->entrypoint.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip= */ true));
}
1290
// x86-64 has no register pairs; all values fit in single 64-bit registers.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator. The base-class blocked-register masks
// include all callee-saved core/FP registers plus the fake return-address
// register; all patch deques are arena-allocated from the graph's allocator.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      constant_area_start_(0),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // The fake return register must be marked allocated so spill/slow-path logic
  // accounts for the return address slot.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001326
// Constructs the per-instruction visitor, caching the shared assembler and the
// owning code generator.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1332
// Marks registers that the register allocator must never hand out.
void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP.
  blocked_core_registers_[TMP] = true;
}
1340
// Maps a core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
David Srbecky9d8606d2015-04-12 09:35:32 +01001344
// Maps a floating-point register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1348
// Emits the method-hotness bookkeeping. Two independent mechanisms:
//  1. AOT hotness counting (CountHotnessInCompiledCode): saturating increment
//     of the ArtMethod hotness counter, skipped once it reaches MaxCounter.
//  2. Baseline JIT: unconditional increment of the ProfilingInfo baseline
//     counter; when the 16-bit add wraps (carry set), call the
//     CompileOptimized entrypoint to trigger optimized recompilation.
void CodeGeneratorX86_64::MaybeIncrementHotness(bool is_frame_entry) {
  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    NearLabel overflow;
    Register method = kMethodRegisterArgument;
    if (!is_frame_entry) {
      // Not at the entry: the method register may be clobbered, reload the
      // current method from its stack slot into TMP.
      CHECK(RequiresCurrentMethod());
      method = TMP;
      __ movq(CpuRegister(method), Address(CpuRegister(RSP), kCurrentMethodStackOffset));
    }
    // Saturate: skip the increment when the counter is already at MaxCounter.
    __ cmpw(Address(CpuRegister(method), ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(ArtMethod::MaxCounter()));
    __ j(kEqual, &overflow);
    __ addw(Address(CpuRegister(method), ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(1));
    __ Bind(&overflow);
  }

  if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
    ScopedObjectAccess soa(Thread::Current());
    ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
    if (info != nullptr) {
      // Embed the ProfilingInfo address directly; it is JIT-lifetime stable
      // enough to reference from compiled code here.
      uint64_t address = reinterpret_cast64<uint64_t>(info);
      NearLabel done;
      __ movq(CpuRegister(TMP), Immediate(address));
      __ addw(Address(CpuRegister(TMP), ProfilingInfo::BaselineHotnessCountOffset().Int32Value()),
              Immediate(1));
      // Carry set means the 16-bit counter wrapped: fall through to request
      // optimized compilation; otherwise skip.
      __ j(kCarryClear, &done);
      if (HasEmptyFrame()) {
        CHECK(is_frame_entry);
        // Frame alignment, and the stub expects the method on the stack.
        __ pushq(CpuRegister(RDI));
        __ cfi().AdjustCFAOffset(kX86_64WordSize);
        __ cfi().RelOffset(DWARFReg(RDI), 0);
      } else if (!RequiresCurrentMethod()) {
        CHECK(is_frame_entry);
        // Spill the method argument so the stub can find it.
        __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
      }
      GenerateInvokeRuntime(
          GetThreadOffset<kX86_64PointerSize>(kQuickCompileOptimized).Int32Value());
      if (HasEmptyFrame()) {
        // Undo the alignment push and restore RDI plus the CFI state.
        __ popq(CpuRegister(RDI));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(RDI));
      }
      __ Bind(&done);
    }
  }
}
1397
// Emits the method prologue: implicit stack-overflow probe, callee-save core
// pushes, frame allocation, callee-save XMM spills, current-method spill,
// should_deoptimize flag init, and the hotness increment.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());


  if (!skip_overflow_check) {
    // Implicit stack-overflow check: a read below RSP into the protected
    // region faults if the stack is exhausted; the fault handler uses the
    // recorded PC info to raise StackOverflowError.
    size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
    __ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    // Push used callee-save core registers, highest index first.
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ pushq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(kX86_64WordSize);
        __ cfi().RelOffset(DWARFReg(reg), 0);
      }
    }

    // Allocate the remainder of the frame in one RSP adjustment.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ subq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(adjust);
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();

    // Spill used callee-save XMM registers into the allocated frame.
    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
        __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
      }
    }

    // Save the current method if we need it. Note that we do not
    // do this in HCurrentMethod, as the instruction might have been removed
    // in the SSA graph.
    if (RequiresCurrentMethod()) {
      CHECK(!HasEmptyFrame());
      __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
              CpuRegister(kMethodRegisterArgument));
    }

    if (GetGraph()->HasShouldDeoptimizeFlag()) {
      CHECK(!HasEmptyFrame());
      // Initialize should_deoptimize flag to 0.
      __ movl(Address(CpuRegister(RSP), GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
    }
  }

  MaybeIncrementHotness(/* is_frame_entry= */ true);
}
1454
// Emits the method epilogue: restore callee-save XMM registers, free the
// frame, pop callee-save core registers, and return. CFI state is snapshotted
// around the epilogue so code emitted after it keeps the in-frame CFA.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();
    // Reload callee-save XMM registers from their spill slots.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Release the frame allocated by GenerateFrameEntry.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop callee-save core registers in reverse push order (lowest index first).
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  // Restore the pre-epilogue CFI state for any following code.
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1485
// Binds the label of `block` at the current assembler position, marking the
// start of that basic block's code.
void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1489
// Emits a move between two arbitrary locations (register, FP register, 32-bit
// stack slot, 64-bit stack slot, or constant source). Stack-slot-to-stack-slot
// moves go through TMP. No-op when source and destination are equal.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // Bit-move from XMM to GPR.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      // Float constants are materialized via their 32-bit bit pattern,
      // doubles via the full 64 bits.
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    // 32-bit stack destination.
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Memory-to-memory via TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    // 64-bit stack destination.
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Memory-to-memory via TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1569
// Materializes the 32-bit immediate `value` into `location`, which must be a
// CPU register. The value is sign-extended to 64 bits because x86-64 GPRs are
// 64-bit wide; Load64BitValue picks the shortest instruction encoding.
void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
1574
// Moves `src` to `dst`. The destination type is unused on x86-64: Move()
// derives the operand size from the kind of the locations themselves
// (stack slot vs. double stack slot, register vs. FP register).
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1579
// Registers `location` as an extra temporary in `locations`.
// Only register locations are supported; any other kind aborts.
void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}
1587
// Emits the control transfer for an unconditional branch (HGoto or the normal
// flow of an HTryBoundary) to `successor`. Handles three special cases before
// falling back to a plain jmp: branches into the exit block (no code), loop
// back edges (hotness counting + suspend check) and the entry block's initial
// suspend check.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    // Control can only reach the exit block via an always-throwing
    // instruction, so there is nothing to emit here.
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge of a loop with a suspend check: bump the hotness counter and
    // let the suspend-check code also perform the branch to the header.
    codegen_->MaybeIncrementHotness(/* is_frame_entry= */ false);
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    // Omit the jump when the successor is emitted right after this block.
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1611
// HGoto has no operands and produces no value: no location summary needed.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1615
// Delegates to HandleGoto, which emits the actual (possibly omitted) jump.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1619
// HTryBoundary has no operands and produces no value: no location summary.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1623
1624void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1625 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1626 if (!successor->IsExitBlock()) {
1627 HandleGoto(try_boundary, successor);
1628 }
1629}
1630
// HExit has no operands and produces no value: no location summary needed.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1634
// The exit block generates no code: control never actually reaches it
// (see HandleGoto, which skips branches to the exit block).
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1637
// Emits the conditional jumps for a floating-point comparison whose flags
// were already set by a preceding ucomiss/ucomisd. An unordered result
// (a NaN operand) must be routed explicitly: depending on the condition's
// NaN semantics it branches to the true or false label before the ordinary
// condition test.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1649
// Emits the comparison for `condition`, leaving the result in EFLAGS only
// (no materialized value). Integer-like types use cmp/test via the
// GenerateIntCompare/GenerateLongCompare helpers; float/double use
// ucomiss/ucomisd with register, constant-pool or stack-slot operands.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      // 32-bit compare covers all sub-int types and references.
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Constant RHS is loaded from an in-memory literal.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1703
// Emits a full compare-and-branch for a long or floating-point `condition`
// that was folded into its user (not materialized). Either target may be
// null to indicate fallthrough; a local label is bound in that case so the
// FP jump helpers always have two explicit targets.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
                                                                  LabelType* true_target_in,
                                                                  LabelType* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  LabelType fallthrough_target;
  LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
  LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;

  // Generate the comparison to set the CC.
  GenerateCompareTest(condition);

  // Now generate the correct jump(s).
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kInt64: {
      // A single integer-condition jump suffices for longs.
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
      break;
    }
    case DataType::Type::kFloat32: {
      // FP conditions need NaN-aware jump sequences.
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    case DataType::Type::kFloat64: {
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }

  if (false_target != &fallthrough_target) {
    __ jmp(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
1744
David Brazdil0debae72015-11-12 18:37:00 +00001745static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1746 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1747 // are set only strictly before `branch`. We can't use the eflags on long
1748 // conditions if they are materialized due to the complex branching.
1749 return cond->IsCondition() &&
1750 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001751 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001752}
1753
// Emits a conditional branch for `instruction` whose condition is operand
// `condition_input_index`. A null target means "fall through". Handles
// constant conditions (unconditional jump or nothing), materialized boolean
// values (test against 0, possibly reusing EFLAGS), and non-materialized
// conditions (compare the condition's own operands directly).
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The flags from the materializing instruction are still live; branch
      // on them directly without re-testing.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1837
// HIf only needs an input location when its condition is a materialized
// boolean value; a folded condition is compared in place by the codegen.
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1844
1845void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001846 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1847 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1848 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1849 nullptr : codegen_->GetLabelOf(true_successor);
1850 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1851 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08001852 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001853}
1854
// HDeoptimize calls the runtime on a slow path, so only the first runtime
// calling-convention register needs to be treated as caller-saved. The
// condition input is only needed when it is a materialized boolean.
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1866
// Branches to a deoptimization slow path when the condition holds;
// otherwise falls through (false_target == nullptr).
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index= */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target= */ nullptr);
}
1874
// The should-deoptimize flag is read into a register; no runtime call.
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
1880
// Loads the should-deoptimize flag from its dedicated slot in the current
// stack frame into the output register.
void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
          Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
1885
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001886static bool SelectCanUseCMOV(HSelect* select) {
1887 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001888 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001889 return false;
1890 }
1891
1892 // A FP condition doesn't generate the single CC that we need.
1893 HInstruction* condition = select->GetCondition();
1894 if (condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001895 DataType::IsFloatingPointType(condition->InputAt(0)->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001896 return false;
1897 }
1898
1899 // We can generate a CMOV for this Select.
1900 return true;
1901}
1902
David Brazdil74eb1b22015-12-14 11:44:01 +00001903void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001904 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001905 if (DataType::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001906 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001907 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001908 } else {
1909 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001910 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001911 if (select->InputAt(1)->IsConstant()) {
1912 locations->SetInAt(1, Location::RequiresRegister());
1913 } else {
1914 locations->SetInAt(1, Location::Any());
1915 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001916 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001917 locations->SetInAt(1, Location::Any());
1918 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001919 }
1920 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1921 locations->SetInAt(2, Location::RequiresRegister());
1922 }
1923 locations->SetOut(Location::SameAsFirstInput());
1924}
1925
// Lowers HSelect either to a CMOV (integer select with an integer condition)
// or to a test-and-branch sequence. The output register starts out holding
// the false value (same-as-first-input constraint) and is conditionally
// overwritten with the true value.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: set the flags directly.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = DataType::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch around the move of the true value.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index= */ 2,
                                     /* true_target= */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1982
// HNativeDebugInfo has no operands; an empty location summary suffices.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
1986
// Intentionally empty: the debug info is recorded elsewhere.
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1990
// Emits a single one-byte nop instruction.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1994
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001995void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001996 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001997 new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001998 // Handle the long/FP comparisons made in instruction simplification.
1999 switch (cond->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002000 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04002001 locations->SetInAt(0, Location::RequiresRegister());
2002 locations->SetInAt(1, Location::Any());
2003 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002004 case DataType::Type::kFloat32:
2005 case DataType::Type::kFloat64:
Mark Mendellc4701932015-04-10 13:18:51 -04002006 locations->SetInAt(0, Location::RequiresFpuRegister());
2007 locations->SetInAt(1, Location::Any());
2008 break;
2009 default:
2010 locations->SetInAt(0, Location::RequiresRegister());
2011 locations->SetInAt(1, Location::Any());
2012 break;
2013 }
David Brazdilb3e773e2016-01-26 11:28:37 +00002014 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01002015 locations->SetOut(Location::RequiresRegister());
2016 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002017}
2018
// Materializes a comparison into its output register (0 or 1). Does nothing
// when the condition is emitted at its use site. Integer-like comparisons
// use setcc on a pre-zeroed register; FP comparisons use ucomiss/ucomisd
// plus NaN-aware jumps that are then converted into 0/1.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kInt64:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kFloat32: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        // Constant RHS is compared via an in-memory literal.
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
2088
// All comparison visitors — both the locations builder and the instruction
// code generator — delegate to HandleCondition, which sets up the shared
// location constraints and materializes the result only when the condition
// is not emitted at its use site.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2168
// Location constraints for HCompare (the three-way -1/0/1 comparison).
// Integer-like inputs: left in a GPR, right unconstrained, output may share
// a register with an input. FP inputs: left in an FP register and the
// output must not overlap (the FP path needs both inputs live while
// producing the result).
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2196
// Emits code for HCompare: writes -1 (less), 0 (equal), or 1 (greater) to the
// output register. For floating point, NaN operands take the branch selected
// by the compare's bias (gt-bias -> 1, lt-bias -> -1).
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  DataType::Type type = compare->InputAt(0)->GetType();
  // Signed "less" for integral compares; overridden to kBelow for FP because
  // ucomiss/ucomisd report "less" through the carry flag.
  Condition less_cond = kLess;

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      // The right operand may be a literal (materialized in the constant
      // area), a stack slot, or another XMM register.
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN) result: the bias decides whether NaN compares as
      // greater or less.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Materialize the -1/0/1 result from the flags set above.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2266
// Int constants live in a constant location; no register is reserved.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2272
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2276
// Null constants live in a constant location; no register is reserved.
void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2282
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2286
// Long constants live in a constant location; no register is reserved.
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2292
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2296
// Float constants live in a constant location; no register is reserved.
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2302
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2306
// Double constants live in a constant location; no register is reserved.
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2312
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2317
// A constructor fence uses no registers: it only emits a memory barrier.
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}
2321
// Emits a store-store barrier so final-field writes in a constructor are
// visible before the reference is published.
void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
2326
// Memory barriers use no registers.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}
2330
// Emits the barrier kind requested by the HIR node.
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2334
// A void return uses no registers.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
2338
// Tears down the frame and returns; no value to place.
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2342
// Pins the returned value to the ABI return register: RAX for integral and
// reference types, XMM0 for floating point.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2367
// Emits the method epilogue. The register allocator already placed the value
// in the ABI return register (checked by the DCHECKs); for OSR compilations an
// FP result is additionally mirrored into RAX so OSR callers can read either.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
      break;

    case DataType::Type::kFloat32: {
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                XMM0);
      // To simplify callers of an OSR method, we put the return value in both
      // floating point and core register.
      if (GetGraph()->IsCompilingOsr()) {
        __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ false);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                XMM0);
      // To simplify callers of an OSR method, we put the return value in both
      // floating point and core register.
      if (GetGraph()->IsCompilingOsr()) {
        __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ true);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
  codegen_->GenerateFrameExit();
}
2407
// Maps a return type to its dex-calling-convention location: RAX for all
// integral/reference types, XMM0 for floating point, nothing for void.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type) const {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kUint32:
    case DataType::Type::kInt32:
    case DataType::Type::kUint64:
    case DataType::Type::kInt64:
      return Location::RegisterLocation(RAX);

    case DataType::Type::kVoid:
      return Location::NoLocation();

    case DataType::Type::kFloat64:
    case DataType::Type::kFloat32:
      return Location::FpuRegisterLocation(XMM0);
  }

  // The switch above is exhaustive over DataType::Type.
  UNREACHABLE();
}
2432
// The callee receives its ArtMethod* in the dedicated method register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2436
// Assigns the location of the next method argument, advancing the visitor's
// internal counters (gp_index_, float_index_, stack_index_). Arguments beyond
// the register set spill to caller-allocated stack slots; 64-bit values
// consume two stack slot units.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kInt64: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        // Fits in a single 64-bit GPR; consume one register index.
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // NOTE(review): spilled longs advance gp_index_ by 2 — presumably to
        // mirror the dex-level two-slot accounting; confirm against the
        // shared calling-convention code.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kFloat32: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kFloat64: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      UNREACHABLE();
  }
  return Location::NoLocation();
}
2495
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2502
// Unresolved invokes go through a runtime trampoline.
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2506
// Location assignment for static/direct calls. Intrinsified invokes set up
// their own, specialized locations; everything else uses the generic
// dex calling convention via HandleInvoke.
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2519
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002520static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2521 if (invoke->GetLocations()->Intrinsified()) {
2522 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2523 intrinsic.Dispatch(invoke);
2524 return true;
2525 }
2526 return false;
2527}
2528
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002529void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002530 // Explicit clinit checks triggered by static invokes must have been pruned by
2531 // art::PrepareForRegisterAllocation.
2532 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002533
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002534 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2535 return;
2536 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002537
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002538 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002539 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002540 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002541}
2542
// Shared location setup for all invoke kinds: applies the x86-64 dex calling
// convention to the invoke's arguments and return value.
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2547
// Location assignment for virtual calls; intrinsics get specialized locations.
void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2556
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002557void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002558 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2559 return;
2560 }
2561
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002562 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002563 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002564}
2565
// Interface calls use the common invoke locations plus RAX as a temp for the
// hidden argument (the interface method index passed to the callee).
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2571
// For JIT baseline compilation, emits an inline-cache update for the receiver
// class in `klass`: a fast-path compare against the cache's first entry, and a
// runtime call to kQuickUpdateInlineCache on mismatch. No code is emitted for
// intrinsics, non-baseline graphs, AOT, or when no ProfilingInfo exists.
void CodeGeneratorX86_64::MaybeGenerateInlineCacheCheck(HInstruction* instruction,
                                                        CpuRegister klass) {
  // The runtime entrypoint expects the class in RDI.
  DCHECK_EQ(RDI, klass.AsRegister());
  // We know the destination of an intrinsic, so no need to record inline
  // caches.
  if (!instruction->GetLocations()->Intrinsified() &&
      GetGraph()->IsCompilingBaseline() &&
      !Runtime::Current()->IsAotCompiler()) {
    ScopedObjectAccess soa(Thread::Current());
    ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
    if (info != nullptr) {
      InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
      // The cache's address is burned into the code as a 64-bit immediate.
      uint64_t address = reinterpret_cast64<uint64_t>(cache);
      NearLabel done;
      __ movq(CpuRegister(TMP), Immediate(address));
      // Fast path for a monomorphic cache.
      __ cmpl(Address(CpuRegister(TMP), InlineCache::ClassesOffset().Int32Value()), klass);
      __ j(kEqual, &done);
      GenerateInvokeRuntime(
          GetThreadOffset<kX86_64PointerSize>(kQuickUpdateInlineCache).Int32Value());
      __ Bind(&done);
    }
  }
}
2596
// Code generation for interface calls: load the receiver's class, optionally
// update the JIT inline cache, pass the interface method index in the hidden
// argument register (RAX), then dispatch through the IMT.
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  codegen_->MaybeGenerateInlineCacheCheck(invoke, temp);

  // Set the hidden argument. This is safe to do here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  // We also do it after MaybeGenerateInlineCacheCheck, which may use RAX.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Offset of this interface method's slot within the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2646
// Polymorphic (MethodHandle) invokes use the common invoke locations.
void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}
2650
// Polymorphic invokes are handled by shared code-generator support.
void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
2654
// invoke-custom uses the common invoke locations.
void LocationsBuilderX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  HandleInvoke(invoke);
}
2658
// invoke-custom is handled by shared code-generator support.
void InstructionCodeGeneratorX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  codegen_->GenerateInvokeCustomCall(invoke);
}
2662
// Location assignment for arithmetic negation. Integral neg works in place on
// a GPR; floating-point neg works in place on an XMM register and needs an FP
// temp to hold the sign-bit mask.
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      // Temp for the constant-area sign mask.
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2684
// Emits code for arithmetic negation: negl/negq for integrals, xorps/xorpd
// with a sign-bit mask for floating point (flipping the sign bit, which also
// handles -0.0 and NaN correctly, unlike subtraction from zero).
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kInt64:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kFloat32: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2728
Roland Levillaindff1f282014-11-05 14:15:05 +00002729void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2730 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002731 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002732 DataType::Type result_type = conversion->GetResultType();
2733 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002734 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2735 << input_type << " -> " << result_type;
David Brazdil46e2a392015-03-16 17:31:52 +00002736
Roland Levillaindff1f282014-11-05 14:15:05 +00002737 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002738 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002739 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002740 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002741 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002742 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
2743 locations->SetInAt(0, Location::Any());
2744 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Roland Levillain01a8d712014-11-14 16:27:39 +00002745 break;
2746
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002747 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002748 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002749 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002750 locations->SetInAt(0, Location::Any());
2751 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2752 break;
2753
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002754 case DataType::Type::kFloat32:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002755 locations->SetInAt(0, Location::RequiresFpuRegister());
2756 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002757 break;
2758
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002759 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002760 locations->SetInAt(0, Location::RequiresFpuRegister());
2761 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002762 break;
2763
2764 default:
2765 LOG(FATAL) << "Unexpected type conversion from " << input_type
2766 << " to " << result_type;
2767 }
2768 break;
2769
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002770 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002771 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002772 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002773 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002774 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002775 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002776 case DataType::Type::kInt16:
2777 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002778 // TODO: We would benefit from a (to-be-implemented)
2779 // Location::RegisterOrStackSlot requirement for this input.
2780 locations->SetInAt(0, Location::RequiresRegister());
2781 locations->SetOut(Location::RequiresRegister());
2782 break;
2783
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002784 case DataType::Type::kFloat32:
Roland Levillain624279f2014-12-04 11:54:28 +00002785 locations->SetInAt(0, Location::RequiresFpuRegister());
2786 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002787 break;
2788
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002789 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002790 locations->SetInAt(0, Location::RequiresFpuRegister());
2791 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002792 break;
2793
2794 default:
2795 LOG(FATAL) << "Unexpected type conversion from " << input_type
2796 << " to " << result_type;
2797 }
2798 break;
2799
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002800 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002801 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002802 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002803 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002804 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002805 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002806 case DataType::Type::kInt16:
2807 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002808 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002809 locations->SetOut(Location::RequiresFpuRegister());
2810 break;
2811
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002812 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002813 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002814 locations->SetOut(Location::RequiresFpuRegister());
2815 break;
2816
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002817 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04002818 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002819 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002820 break;
2821
2822 default:
2823 LOG(FATAL) << "Unexpected type conversion from " << input_type
2824 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002825 }
Roland Levillaincff13742014-11-17 14:32:17 +00002826 break;
2827
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002828 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002829 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002830 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002831 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002832 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002833 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002834 case DataType::Type::kInt16:
2835 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002836 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002837 locations->SetOut(Location::RequiresFpuRegister());
2838 break;
2839
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002840 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002841 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002842 locations->SetOut(Location::RequiresFpuRegister());
2843 break;
2844
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002845 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04002846 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002847 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002848 break;
2849
2850 default:
2851 LOG(FATAL) << "Unexpected type conversion from " << input_type
2852 << " to " << result_type;
2853 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002854 break;
2855
2856 default:
2857 LOG(FATAL) << "Unexpected type conversion from " << input_type
2858 << " to " << result_type;
2859 }
2860}
2861
2862void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2863 LocationSummary* locations = conversion->GetLocations();
2864 Location out = locations->Out();
2865 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002866 DataType::Type result_type = conversion->GetResultType();
2867 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002868 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2869 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002870 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002871 case DataType::Type::kUint8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002872 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002873 case DataType::Type::kInt8:
2874 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002875 case DataType::Type::kInt16:
2876 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002877 case DataType::Type::kInt64:
2878 if (in.IsRegister()) {
2879 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2880 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2881 __ movzxb(out.AsRegister<CpuRegister>(),
2882 Address(CpuRegister(RSP), in.GetStackIndex()));
2883 } else {
2884 __ movl(out.AsRegister<CpuRegister>(),
2885 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
2886 }
2887 break;
2888
2889 default:
2890 LOG(FATAL) << "Unexpected type conversion from " << input_type
2891 << " to " << result_type;
2892 }
2893 break;
2894
2895 case DataType::Type::kInt8:
2896 switch (input_type) {
2897 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002898 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002899 case DataType::Type::kInt16:
2900 case DataType::Type::kInt32:
2901 case DataType::Type::kInt64:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002902 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002903 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002904 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002905 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002906 Address(CpuRegister(RSP), in.GetStackIndex()));
2907 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002908 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002909 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002910 }
2911 break;
2912
2913 default:
2914 LOG(FATAL) << "Unexpected type conversion from " << input_type
2915 << " to " << result_type;
2916 }
2917 break;
2918
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002919 case DataType::Type::kUint16:
2920 switch (input_type) {
2921 case DataType::Type::kInt8:
2922 case DataType::Type::kInt16:
2923 case DataType::Type::kInt32:
2924 case DataType::Type::kInt64:
2925 if (in.IsRegister()) {
2926 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2927 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2928 __ movzxw(out.AsRegister<CpuRegister>(),
2929 Address(CpuRegister(RSP), in.GetStackIndex()));
2930 } else {
2931 __ movl(out.AsRegister<CpuRegister>(),
2932 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
2933 }
2934 break;
2935
2936 default:
2937 LOG(FATAL) << "Unexpected type conversion from " << input_type
2938 << " to " << result_type;
2939 }
2940 break;
2941
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002942 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002943 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002944 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002945 case DataType::Type::kInt32:
2946 case DataType::Type::kInt64:
Roland Levillain01a8d712014-11-14 16:27:39 +00002947 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002948 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002949 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002950 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002951 Address(CpuRegister(RSP), in.GetStackIndex()));
2952 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002953 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002954 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002955 }
2956 break;
2957
2958 default:
2959 LOG(FATAL) << "Unexpected type conversion from " << input_type
2960 << " to " << result_type;
2961 }
2962 break;
2963
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002964 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002965 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002966 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002967 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002968 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002969 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002970 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002971 Address(CpuRegister(RSP), in.GetStackIndex()));
2972 } else {
2973 DCHECK(in.IsConstant());
2974 DCHECK(in.GetConstant()->IsLongConstant());
2975 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002976 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002977 }
2978 break;
2979
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002980 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002981 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2982 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002983 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002984
2985 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002986 // if input >= (float)INT_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07002987 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimIntMax)));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002988 __ j(kAboveEqual, &done);
2989 // if input == NaN goto nan
2990 __ j(kUnordered, &nan);
2991 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002992 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002993 __ jmp(&done);
2994 __ Bind(&nan);
2995 // output = 0
2996 __ xorl(output, output);
2997 __ Bind(&done);
2998 break;
2999 }
3000
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003001 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003002 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3003 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003004 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003005
3006 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04003007 // if input >= (double)INT_MAX goto done
3008 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003009 __ j(kAboveEqual, &done);
3010 // if input == NaN goto nan
3011 __ j(kUnordered, &nan);
3012 // output = double-to-int-truncate(input)
3013 __ cvttsd2si(output, input);
3014 __ jmp(&done);
3015 __ Bind(&nan);
3016 // output = 0
3017 __ xorl(output, output);
3018 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00003019 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003020 }
Roland Levillain946e1432014-11-11 17:35:19 +00003021
3022 default:
3023 LOG(FATAL) << "Unexpected type conversion from " << input_type
3024 << " to " << result_type;
3025 }
3026 break;
3027
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003028 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00003029 switch (input_type) {
3030 DCHECK(out.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003031 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003032 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003033 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003034 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003035 case DataType::Type::kInt16:
3036 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00003037 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003038 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00003039 break;
3040
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003041 case DataType::Type::kFloat32: {
Roland Levillain624279f2014-12-04 11:54:28 +00003042 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3043 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003044 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00003045
Mark Mendell92e83bf2015-05-07 11:25:03 -04003046 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003047 // if input >= (float)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003048 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimLongMax)));
Roland Levillain624279f2014-12-04 11:54:28 +00003049 __ j(kAboveEqual, &done);
3050 // if input == NaN goto nan
3051 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003052 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00003053 __ cvttss2si(output, input, true);
3054 __ jmp(&done);
3055 __ Bind(&nan);
3056 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003057 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00003058 __ Bind(&done);
3059 break;
3060 }
3061
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003062 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003063 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3064 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003065 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003066
Mark Mendell92e83bf2015-05-07 11:25:03 -04003067 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003068 // if input >= (double)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003069 __ comisd(input, codegen_->LiteralDoubleAddress(
3070 static_cast<double>(kPrimLongMax)));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003071 __ j(kAboveEqual, &done);
3072 // if input == NaN goto nan
3073 __ j(kUnordered, &nan);
3074 // output = double-to-long-truncate(input)
3075 __ cvttsd2si(output, input, true);
3076 __ jmp(&done);
3077 __ Bind(&nan);
3078 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003079 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003080 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00003081 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003082 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003083
3084 default:
3085 LOG(FATAL) << "Unexpected type conversion from " << input_type
3086 << " to " << result_type;
3087 }
3088 break;
3089
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003090 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00003091 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003092 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003093 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003094 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003095 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003096 case DataType::Type::kInt16:
3097 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003098 if (in.IsRegister()) {
3099 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3100 } else if (in.IsConstant()) {
3101 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3102 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003103 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003104 } else {
3105 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3106 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3107 }
Roland Levillaincff13742014-11-17 14:32:17 +00003108 break;
3109
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003110 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003111 if (in.IsRegister()) {
3112 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3113 } else if (in.IsConstant()) {
3114 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3115 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06003116 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003117 } else {
3118 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3119 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3120 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003121 break;
3122
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003123 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04003124 if (in.IsFpuRegister()) {
3125 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3126 } else if (in.IsConstant()) {
3127 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
3128 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003129 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003130 } else {
3131 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
3132 Address(CpuRegister(RSP), in.GetStackIndex()));
3133 }
Roland Levillaincff13742014-11-17 14:32:17 +00003134 break;
3135
3136 default:
3137 LOG(FATAL) << "Unexpected type conversion from " << input_type
3138 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003139 }
Roland Levillaincff13742014-11-17 14:32:17 +00003140 break;
3141
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003142 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00003143 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003144 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003145 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003146 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003147 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003148 case DataType::Type::kInt16:
3149 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003150 if (in.IsRegister()) {
3151 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3152 } else if (in.IsConstant()) {
3153 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3154 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003155 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003156 } else {
3157 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3158 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3159 }
Roland Levillaincff13742014-11-17 14:32:17 +00003160 break;
3161
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003162 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003163 if (in.IsRegister()) {
3164 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3165 } else if (in.IsConstant()) {
3166 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3167 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003168 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003169 } else {
3170 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3171 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3172 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003173 break;
3174
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003175 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04003176 if (in.IsFpuRegister()) {
3177 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3178 } else if (in.IsConstant()) {
3179 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3180 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003181 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003182 } else {
3183 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3184 Address(CpuRegister(RSP), in.GetStackIndex()));
3185 }
Roland Levillaincff13742014-11-17 14:32:17 +00003186 break;
3187
3188 default:
3189 LOG(FATAL) << "Unexpected type conversion from " << input_type
3190 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003191 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003192 break;
3193
3194 default:
3195 LOG(FATAL) << "Unexpected type conversion from " << input_type
3196 << " to " << result_type;
3197 }
3198}
3199
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003200void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003201 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003202 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003203 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003204 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003205 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003206 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3207 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003208 break;
3209 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003210
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003211 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003212 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003213 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003214 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003215 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003216 break;
3217 }
3218
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003219 case DataType::Type::kFloat64:
3220 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003221 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003222 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003223 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003224 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003225 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003226
3227 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003228 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003229 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003230}
3231
3232void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
3233 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003234 Location first = locations->InAt(0);
3235 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003236 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01003237
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003238 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003239 case DataType::Type::kInt32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003240 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003241 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3242 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003243 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3244 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003245 } else {
3246 __ leal(out.AsRegister<CpuRegister>(), Address(
3247 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
3248 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003249 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003250 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3251 __ addl(out.AsRegister<CpuRegister>(),
3252 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
3253 } else {
3254 __ leal(out.AsRegister<CpuRegister>(), Address(
3255 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
3256 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003257 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003258 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003259 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003260 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003261 break;
3262 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003263
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003264 case DataType::Type::kInt64: {
Mark Mendell09b84632015-02-13 17:48:38 -05003265 if (second.IsRegister()) {
3266 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3267 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003268 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3269 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05003270 } else {
3271 __ leaq(out.AsRegister<CpuRegister>(), Address(
3272 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
3273 }
3274 } else {
3275 DCHECK(second.IsConstant());
3276 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3277 int32_t int32_value = Low32Bits(value);
3278 DCHECK_EQ(int32_value, value);
3279 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3280 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
3281 } else {
3282 __ leaq(out.AsRegister<CpuRegister>(), Address(
3283 first.AsRegister<CpuRegister>(), int32_value));
3284 }
3285 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003286 break;
3287 }
3288
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003289 case DataType::Type::kFloat32: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003290 if (second.IsFpuRegister()) {
3291 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3292 } else if (second.IsConstant()) {
3293 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003294 codegen_->LiteralFloatAddress(
3295 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003296 } else {
3297 DCHECK(second.IsStackSlot());
3298 __ addss(first.AsFpuRegister<XmmRegister>(),
3299 Address(CpuRegister(RSP), second.GetStackIndex()));
3300 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003301 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003302 }
3303
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003304 case DataType::Type::kFloat64: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003305 if (second.IsFpuRegister()) {
3306 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3307 } else if (second.IsConstant()) {
3308 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003309 codegen_->LiteralDoubleAddress(
3310 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003311 } else {
3312 DCHECK(second.IsDoubleStackSlot());
3313 __ addsd(first.AsFpuRegister<XmmRegister>(),
3314 Address(CpuRegister(RSP), second.GetStackIndex()));
3315 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003316 break;
3317 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003318
3319 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003320 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003321 }
3322}
3323
3324void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003325 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003326 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003327 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003328 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003329 locations->SetInAt(0, Location::RequiresRegister());
3330 locations->SetInAt(1, Location::Any());
3331 locations->SetOut(Location::SameAsFirstInput());
3332 break;
3333 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003334 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003335 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003336 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003337 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003338 break;
3339 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003340 case DataType::Type::kFloat32:
3341 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003342 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003343 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003344 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003345 break;
Calin Juravle11351682014-10-23 15:38:15 +01003346 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003347 default:
Calin Juravle11351682014-10-23 15:38:15 +01003348 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003349 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003350}
3351
// Emits x86-64 code for an HSub. The locations (set up in
// LocationsBuilderX86_64::VisitSub) guarantee out == first input, so the
// two-operand subtract forms can be used for every case below.
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case DataType::Type::kInt32: {
      // RHS may be a register, an int constant, or a stack slot.
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsConstant()) {
        // The locations builder used RegisterOrInt32Constant, so the constant
        // is guaranteed to fit in a sign-extended 32-bit immediate.
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // RHS may be an XMM register, a constant-area literal, or a stack slot.
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      // Same shape as kFloat32, using the double-precision subsd and a
      // double stack slot / double literal.
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3414
Calin Juravle34bacdf2014-10-07 20:23:36 +01003415void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3416 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003417 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003418 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003419 case DataType::Type::kInt32: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003420 locations->SetInAt(0, Location::RequiresRegister());
3421 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003422 if (mul->InputAt(1)->IsIntConstant()) {
3423 // Can use 3 operand multiply.
3424 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3425 } else {
3426 locations->SetOut(Location::SameAsFirstInput());
3427 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003428 break;
3429 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003430 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003431 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003432 locations->SetInAt(1, Location::Any());
3433 if (mul->InputAt(1)->IsLongConstant() &&
3434 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003435 // Can use 3 operand multiply.
3436 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3437 } else {
3438 locations->SetOut(Location::SameAsFirstInput());
3439 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003440 break;
3441 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003442 case DataType::Type::kFloat32:
3443 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003444 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003445 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003446 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003447 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003448 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003449
3450 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003451 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003452 }
3453}
3454
// Emits x86-64 code for an HMul. For integer multiplies by a constant the
// locations builder may have allowed a non-overlapping output so the
// three-operand imul form can be used; all other forms require out == first.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        // Three-operand form: out = first * imm (out need not alias first).
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case DataType::Type::kInt64: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Three-operand form with a sign-extended 32-bit immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // FP multiplies always use the two-operand SSE form.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3538
// Pushes `source` onto the x87 FP stack (used by GenerateRemFP, since fprem
// is an x87-only instruction).
//  - source:           value to push; may be a stack slot or something
//                      movable by codegen_->Move (e.g. an XMM register).
//  - temp_offset:      RSP-relative offset of a scratch slot used when the
//                      value is not already in memory.
//  - stack_adjustment: bytes the caller has already subtracted from RSP, so
//                      incoming stack-slot offsets must be rebased by it.
//  - is_float:         true for 32-bit float (flds), false for double (fldl).
void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
                                                     uint32_t stack_adjustment, bool is_float) {
  if (source.IsStackSlot()) {
    // Already in memory: load straight from the (rebased) stack slot.
    DCHECK(is_float);
    __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
  } else if (source.IsDoubleStackSlot()) {
    DCHECK(!is_float);
    __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
  } else {
    // Write the value to the temporary location on the stack and load to FP stack.
    if (is_float) {
      Location stack_temp = Location::StackSlot(temp_offset);
      codegen_->Move(stack_temp, source);
      __ flds(Address(CpuRegister(RSP), temp_offset));
    } else {
      Location stack_temp = Location::DoubleStackSlot(temp_offset);
      codegen_->Move(stack_temp, source);
      __ fldl(Address(CpuRegister(RSP), temp_offset));
    }
  }
}
3560
// Emits code for a floating-point HRem using the x87 fprem instruction
// (SSE has no FP remainder). fprem performs partial remainder reduction, so
// it is looped until the FPU reports the reduction complete via the C2 flag.
// NOTE(review): this clobbers RAX via fstsw — presumably RAX is reserved as
// a temp by the rem's LocationSummary (set up outside this view); confirm.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem computes ST(0) mod ST(1), so the divisor is pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3613
// Emits code for an integer HDiv/HRem whose divisor is the constant 1 or -1:
//   rem:  always 0, so just zero the output.
//   div:  copy the numerator, negating it when the divisor is -1.
void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DCHECK(imm == 1 || imm == -1);

  switch (instruction->GetResultType()) {
    case DataType::Type::kInt32: {
      if (instruction->IsRem()) {
        // x % (+/-)1 == 0.
        __ xorl(output_register, output_register);
      } else {
        __ movl(output_register, input_register);
        if (imm == -1) {
          __ negl(output_register);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (instruction->IsRem()) {
        // xorl is sufficient: writing a 32-bit register zero-extends to 64 bits.
        __ xorl(output_register, output_register);
      } else {
        __ movq(output_register, input_register);
        if (imm == -1) {
          __ negq(output_register);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
  }
}
// Emits code for an integer HRem whose divisor's absolute value is a power
// of two (m = abs_imm). Computes out = numerator & (m - 1) and, when the
// result is non-zero and the numerator is negative, corrects it to the
// negative remainder (out - m) so the result matches truncated division.
// The sign of the divisor is irrelevant for the remainder.
void InstructionCodeGeneratorX86_64::RemByPowerOfTwo(HRem* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);
  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    NearLabel done;
    __ movl(out, numerator);
    __ andl(out, Immediate(abs_imm-1));
    // A zero masked value is the correct remainder for any sign.
    __ j(Condition::kZero, &done);
    // tmp = out + ~(m - 1) == out - m, i.e. the negative remainder.
    __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm-1))));
    __ testl(numerator, numerator);
    // Select the negative remainder when the numerator is negative.
    // NOTE(review): the trailing 'false' presumably selects 32-bit cmov
    // operand size — confirm against the assembler's cmov signature.
    __ cmov(Condition::kLess, out, tmp, false);
    __ Bind(&done);

  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    // The 64-bit mask may not fit in an immediate; materialize it in tmp.
    codegen_->Load64BitValue(tmp, abs_imm - 1);
    NearLabel done;

    __ movq(out, numerator);
    __ andq(out, tmp);
    __ j(Condition::kZero, &done);
    // tmp = (numerator >> 63) << log2(m): all-ones in the bits above the
    // mask when the numerator is negative, zero otherwise. OR-ing it in
    // converts the positive masked value into the negative remainder.
    __ movq(tmp, numerator);
    __ sarq(tmp, Immediate(63));
    __ shlq(tmp, Immediate(WhichPowerOf2(abs_imm)));
    __ orq(out, tmp);
    __ Bind(&done);
  }
}
// Emits code for an integer HDiv whose divisor's absolute value is a power
// of two (m = abs_imm). Signed division by 2^k cannot be a plain arithmetic
// shift: negative numerators must first be biased by (m - 1) so the shift
// rounds toward zero. After shifting by log2(m), the result is negated when
// the divisor is negative.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // When denominator is equal to 2, we can add signed bit and numerator to tmp.
    // Below we are using addl instruction instead of cmov which give us 1 cycle benefit.
    if (abs_imm == 2) {
      // tmp = numerator + (numerator >>> 31), i.e. bias by 1 only if negative.
      __ leal(tmp, Address(numerator, 0));
      __ shrl(tmp, Immediate(31));
      __ addl(tmp, numerator);
    } else {
      // tmp = numerator + (m - 1) if numerator < 0, else numerator.
      __ leal(tmp, Address(numerator, abs_imm - 1));
      __ testl(numerator, numerator);
      __ cmov(kGreaterEqual, tmp, numerator);
    }
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
    if (abs_imm == 2) {
      // rdx = numerator + (numerator >>> 63): bias by 1 only if negative.
      __ movq(rdx, numerator);
      __ shrq(rdx, Immediate(63));
      __ addq(rdx, numerator);
    } else {
      // 64-bit bias may not fit an immediate; materialize (m - 1) first.
      codegen_->Load64BitValue(rdx, abs_imm - 1);
      __ addq(rdx, numerator);
      __ testq(numerator, numerator);
      __ cmov(kGreaterEqual, rdx, numerator);
    }
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3746
// Emits code for an integer HDiv/HRem by an arbitrary non-trivial constant
// using the standard "magic number" multiplication technique (see Hacker's
// Delight / Granlund-Montgomery): the quotient is obtained from the high
// half of (magic * numerator) with a post-add/sub, arithmetic shift, and a
// +1 correction for negative results; the remainder is then
// numerator - quotient * imm. Register constraints (asserted below): the
// numerator arrives in RAX, RDX is a temp, and the output is RAX for div,
// RDX for rem — exactly what the widening imul requires.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // A scratch register that preserves the original numerator across the
  // multiply (which clobbers RAX and RDX).
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long= */, &magic, &shift);

    // Save the numerator; the widening imull below clobbers EAX and EDX.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correct the high half when the magic constant's sign disagrees with
    // the divisor's (the magic value was computed as unsigned).
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, left in EDX (the output).
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // The multiply-back may need the constant area if imm exceeds int32.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3857
// Dispatcher for integer HDiv/HRem code generation. Constant divisors are
// routed to specialized strength-reduced sequences; register divisors use
// the hardware idiv with a slow path guarding the divisor == -1 case
// (INT_MIN / -1 raises a hardware exception on x86).
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // idiv reads the dividend from RAX and writes quotient/remainder to
  // RAX/RDX; the locations builder pinned the registers accordingly.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      if (is_div) {
        DivByPowerOfTwo(instruction->AsDiv());
      } else {
        RemByPowerOfTwo(instruction->AsRem());
      }
    } else {
      // General constant: magic-number multiplication.
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == DataType::Type::kInt32) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
3917
// Register-allocation constraints for HDiv.
// Integer division uses the x86-64 idiv convention (see GenerateDivRemIntegral
// above): dividend in RDX:RAX, quotient left in RAX; hence the fixed RAX
// input/output and the RDX temp. FP division has no fixed-register needs.
void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      // Intel uses edx:eax as the dividend, so RDX is clobbered and must be a temp.
      locations->AddTemp(Location::RegisterLocation(RDX));
      // For a constant divisor the generated code (GenerateDivRemWithAnyConstant,
      // called from GenerateDivRemIntegral above) presumably uses imul with results
      // pinned to RAX/RDX, so an extra temp is requested to preserve the numerator
      // while those registers are tweaked. TODO(review): confirm against
      // GenerateDivRemWithAnyConstant.
      if (div->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      // divss/divsd can take the divisor from a register, a literal pool
      // address, or a stack slot, so any location is acceptable.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3950
// Code generation for HDiv. Integer cases are delegated to
// GenerateDivRemIntegral; float/double cases emit divss/divsd with the
// divisor taken from a register, the constant pool, or the stack, matching
// the Location::Any() constraint set in the locations builder.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The result is required to reuse the first input's register.
  DCHECK(first.Equals(locations->Out()));

  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateDivRemIntegral(div);
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Divisor materialized as a RIP-relative literal-pool load.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Divisor materialized as a RIP-relative literal-pool load.
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3999
// Register-allocation constraints for HRem.
// Integer remainder follows the x86-64 idiv convention (see
// GenerateDivRemIntegral above): dividend in RDX:RAX, remainder left in RDX —
// hence the fixed RAX input and RDX output. FP remainder
// (GenerateRemFP) needs RAX as a temp and takes both operands from anywhere.
void LocationsBuilderX86_64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(rem, LocationSummary::kNoCall);

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      // Intel uses rdx:rax as the dividend and puts the remainder in rdx.
      locations->SetOut(Location::RegisterLocation(RDX));
      // For a constant divisor the generated code (GenerateDivRemWithAnyConstant,
      // called from GenerateDivRemIntegral above) presumably uses imul with results
      // pinned to RAX/RDX, so an extra temp is requested to preserve the numerator
      // while those registers are tweaked. TODO(review): confirm against
      // GenerateDivRemWithAnyConstant.
      if (rem->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::Any());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresFpuRegister());
      // RAX temp reserved for the FP-remainder helper (GenerateRemFP).
      locations->AddTemp(Location::RegisterLocation(RAX));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
4034
// Code generation for HRem: dispatch by result type to the shared
// integer div/rem emitter or the FP-remainder helper.
void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      // Shared with VisitDiv; leaves the remainder in RDX per the locations.
      GenerateDivRemIntegral(rem);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      GenerateRemFP(rem);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
  }
}
4052
// Shared register-allocation setup for HMin/HMax: both operands in
// (fpu-)registers, result aliased to the first input.
static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
  LocationSummary* locations = new (allocator) LocationSummary(minmax);
  switch (minmax->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      // The following is sub-optimal, but all we can do for now. It would be fine to also accept
      // the second input to be the output (we can simply swap inputs).
      locations->SetOut(Location::SameAsFirstInput());
      break;
    default:
      LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
  }
}
4074
// Emits branch-free integer min/max as a compare followed by a conditional
// move: out starts as op1 (locations force out == in0), and op2 is moved in
// only when it is the better value (op1 > op2 for min, op1 < op2 for max).
void InstructionCodeGeneratorX86_64::GenerateMinMaxInt(LocationSummary* locations,
                                                       bool is_min,
                                                       DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    // a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister op2 = op2_loc.AsRegister<CpuRegister>();

  // (out := op1)
  // out <=? op2
  // if out is min jmp done
  // out := op2
  // done:

  if (type == DataType::Type::kInt64) {
    __ cmpq(out, op2);
    __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ true);
  } else {
    DCHECK_EQ(type, DataType::Type::kInt32);
    __ cmpl(out, op2);
    __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ false);
  }
}
4108
// Emits floating-point min/max with full IEEE semantics: any NaN operand
// produces the canonical quiet NaN, and -0.0/+0.0 are distinguished (min
// prefers -0.0, max prefers +0.0) via a bitwise or/and of the two operands.
// ucomiss/ucomisd sets PF on an unordered (NaN) compare, which is what the
// kParityEven branch tests.
void InstructionCodeGeneratorX86_64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if Nan jmp Nan_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // Nan_label:
  // out := NaN
  // op2_label:
  // out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick. Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (type == DataType::Type::kFloat64) {
    __ ucomisd(out, op2);
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ ucomiss(out, op2);
  }

  // PF set => unordered compare => at least one operand is NaN.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0: operands compared equal, so they are +0.0/-0.0 in some
  // order. or-ing the bit patterns yields -0.0 (sign bit set) for min;
  // and-ing yields +0.0 for max.
  if (is_min) {
    if (type == DataType::Type::kFloat64) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (type == DataType::Type::kFloat64) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling: load the canonical quiet NaN bit pattern.
  __ Bind(&nan);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, codegen_->LiteralInt64Address(INT64_C(0x7FF8000000000000)));
  } else {
    __ movss(out, codegen_->LiteralInt32Address(INT32_C(0x7FC00000)));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
4191
// Dispatches HMin/HMax code generation to the integer or FP emitter
// according to the result type.
void InstructionCodeGeneratorX86_64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
  DataType::Type type = minmax->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
      break;
    default:
      LOG(FATAL) << "Unexpected type for HMinMax " << type;
  }
}
4207
// HMin/HMax visitors: thin wrappers over the shared min/max helpers,
// differing only in the is_min flag.
void LocationsBuilderX86_64::VisitMin(HMin* min) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
}

void InstructionCodeGeneratorX86_64::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}

void LocationsBuilderX86_64::VisitMax(HMax* max) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}

void InstructionCodeGeneratorX86_64::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
4223
// Register-allocation constraints for HAbs: in-place on the input register,
// plus one temp of the matching kind to build the sign mask.
void LocationsBuilderX86_64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresRegister());
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4243
// Code generation for HAbs.
// Integers use the branchless sign-mask idiom: mask = x >> (bits-1)
// (arithmetic shift: 0 for x >= 0, all-ones for x < 0), then
// (x + mask) ^ mask == |x|. Note this leaves INT_MIN/INT64_MIN unchanged,
// matching two's-complement abs semantics.
// Floats/doubles clear the sign bit by and-ing with an all-ones-but-sign mask.
void InstructionCodeGeneratorX86_64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask: 0 if out >= 0, 0xFFFFFFFF if out < 0.
      __ movl(mask, out);
      __ sarl(mask, Immediate(31));
      // Add mask, then xor: negates iff the value was negative.
      __ addl(out, mask);
      __ xorl(out, mask);
      break;
    }
    case DataType::Type::kInt64: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask: 0 if out >= 0, all-ones if out < 0.
      __ movq(mask, out);
      __ sarq(mask, Immediate(63));
      // Add mask, then xor: negates iff the value was negative.
      __ addq(out, mask);
      __ xorq(out, mask);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit (bit 31).
      __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x7FFFFFFF)));
      __ andps(out, mask);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit (bit 63).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF)));
      __ andpd(out, mask);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4287
// Register-allocation constraints for HDivZeroCheck: the value may be in a
// register, a stack slot, or a constant — the test below handles all three.
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
4292
4293void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004294 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01004295 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86_64(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00004296 codegen_->AddSlowPath(slow_path);
4297
4298 LocationSummary* locations = instruction->GetLocations();
4299 Location value = locations->InAt(0);
4300
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004301 switch (instruction->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004302 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004303 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004304 case DataType::Type::kInt8:
4305 case DataType::Type::kUint16:
4306 case DataType::Type::kInt16:
4307 case DataType::Type::kInt32: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004308 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004309 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004310 __ j(kEqual, slow_path->GetEntryLabel());
4311 } else if (value.IsStackSlot()) {
4312 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
4313 __ j(kEqual, slow_path->GetEntryLabel());
4314 } else {
4315 DCHECK(value.IsConstant()) << value;
4316 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004317 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004318 }
4319 }
4320 break;
Calin Juravled0d48522014-11-04 16:40:20 +00004321 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004322 case DataType::Type::kInt64: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004323 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004324 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004325 __ j(kEqual, slow_path->GetEntryLabel());
4326 } else if (value.IsDoubleStackSlot()) {
4327 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
4328 __ j(kEqual, slow_path->GetEntryLabel());
4329 } else {
4330 DCHECK(value.IsConstant()) << value;
4331 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004332 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004333 }
4334 }
4335 break;
4336 }
4337 default:
4338 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00004339 }
Calin Juravled0d48522014-11-04 16:40:20 +00004340}
4341
// Register-allocation constraints shared by HShl/HShr/HUShr: x86 variable
// shifts take their count in CL, so a non-constant shift count is pinned
// to RCX; the shifted value is modified in place.
void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL.
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
  }
}
4361
// Code generation shared by HShl/HShr/HUShr: shl/sar/shr on the first input
// in place, with the count either in CL or as an immediate masked to the
// valid shift range (kMaxIntShiftDistance / kMaxLongShiftDistance).
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        // Register count: locations guarantee this is RCX (count taken from CL).
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
4419
// Register-allocation constraints for HRor: same shape as the shifts —
// rotate in place, with a non-constant rotate count pinned to RCX (CL).
void LocationsBuilderX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL (unless it is a constant).
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4438
// Code generation for HRor: rorl/rorq in place, count in CL or as an
// immediate masked to the valid rotate range.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt32:
      if (second.IsRegister()) {
        // Register count: locations guarantee this is RCX (count taken from CL).
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case DataType::Type::kInt64:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4468
// HShl/HShr/HUShr visitors: all three delegate to the shared HandleShift
// helpers, which distinguish the opcodes via op->IsShl()/IsShr()/IsUShr().
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4492
// Register-allocation constraints for HNewInstance: a runtime call on the
// main path, with the class argument in the first runtime-call register and
// the allocated object returned in RAX.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(RAX));
}
4500
// Code generation for HNewInstance: invoke the allocation entrypoint chosen
// for this instruction; the runtime call makes this a non-leaf method.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  DCHECK(!codegen_->IsLeafMethod());
}
4506
// Register-allocation constraints for HNewArray: a runtime call on the main
// path, with class and length in the first two runtime-call registers and
// the allocated array returned in RAX.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
4515
// Code generation for HNewArray: select the array-allocation entrypoint for
// this instruction and invoke it; the call makes this a non-leaf method.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
4523
// Location for HParameterValue: ask the parameter visitor for the next slot
// in the calling convention. Stack-passed parameters live in the caller's
// frame, so their slot index is rebased by this method's frame size to make
// it addressable relative to the current stack pointer.
void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
4535
// No code to emit for HParameterValue: the value is already in the location
// chosen by the calling convention.
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4540
4541void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4542 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004543 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004544 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4545}
4546
// No code to emit for HCurrentMethod: the method is already in the register
// chosen by the locations builder.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4551
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004552void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4553 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004554 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004555 locations->SetInAt(0, Location::RequiresRegister());
4556 locations->SetOut(Location::RequiresRegister());
4557}
4558
// Code generation for HClassTableGet: load a method pointer out of a class.
// For the vtable, the entry is embedded in the Class object, so a single
// load at the entry's offset suffices. For the IMT, two loads are needed:
// first the ImTable pointer from the Class, then the element at its offset.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // Load the ImTable pointer from the class, ...
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    // ... then the method entry from the ImTable.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4576
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004577void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004578 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004579 new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004580 locations->SetInAt(0, Location::RequiresRegister());
4581 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004582}
4583
// Code generation for HNot (bitwise complement): notl/notq in place on the
// input/output register.
void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  // Locations force the output to alias the input register.
  DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
            locations->Out().AsRegister<CpuRegister>().AsRegister());
  Location out = locations->Out();
  switch (not_->GetResultType()) {
    case DataType::Type::kInt32:
      __ notl(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kInt64:
      __ notq(out.AsRegister<CpuRegister>());
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
4602
David Brazdil66d126e2015-04-03 16:02:44 +01004603void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4604 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004605 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
David Brazdil66d126e2015-04-03 16:02:44 +01004606 locations->SetInAt(0, Location::RequiresRegister());
4607 locations->SetOut(Location::SameAsFirstInput());
4608}
4609
4610void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004611 LocationSummary* locations = bool_not->GetLocations();
4612 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4613 locations->Out().AsRegister<CpuRegister>().AsRegister());
4614 Location out = locations->Out();
4615 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4616}
4617
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004618void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004619 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004620 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004621 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004622 locations->SetInAt(i, Location::Any());
4623 }
4624 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004625}
4626
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis generate no code in this backend; reaching this visitor indicates a
  // compiler bug (phis are expected to be resolved before code generation).
  LOG(FATAL) << "Unimplemented";
}
4630
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004631void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004632 /*
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004633 * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004634 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004635 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4636 */
4637 switch (kind) {
4638 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004639 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004640 break;
4641 }
4642 case MemBarrierKind::kAnyStore:
4643 case MemBarrierKind::kLoadAny:
4644 case MemBarrierKind::kStoreStore: {
4645 // nop
4646 break;
4647 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004648 case MemBarrierKind::kNTStoreStore:
4649 // Non-Temporal Store/Store needs an explicit fence.
Andreas Gampe3db70682018-12-26 15:12:03 -08004650 MemoryFence(/* non-temporal= */ true);
Mark Mendell7aa04a12016-01-27 22:39:07 -05004651 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004652 }
4653}
4654
4655void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4656 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4657
Roland Levillain0d5a2812015-11-13 10:07:31 +00004658 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004659 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004660 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004661 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
4662 object_field_get_with_read_barrier
4663 ? LocationSummary::kCallOnSlowPath
4664 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004665 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004666 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004667 }
Calin Juravle52c48962014-12-16 17:02:57 +00004668 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004669 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004670 locations->SetOut(Location::RequiresFpuRegister());
4671 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004672 // The output overlaps for an object field get when read barriers
4673 // are enabled: we do not want the move to overwrite the object's
4674 // location, as we need it to emit the read barrier.
4675 locations->SetOut(
4676 Location::RequiresRegister(),
4677 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004678 }
Calin Juravle52c48962014-12-16 17:02:57 +00004679}
4680
// Emits the load for an instance or static field get.
// InAt(0) holds the base (object or Class), Out() receives the value;
// `field_info` supplies the field's offset, type and volatility.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  // One load instruction per type; the choice of zero- vs sign-extension
  // follows the signedness of the type.
  switch (load_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt8: {
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint16: {
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt16: {
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt32: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kReference: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check= */ true);
        if (is_volatile) {
          // Volatile reference load: emit the LoadAny barrier here because
          // the common post-switch barrier below skips references.
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case DataType::Type::kInt64: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat32: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat64: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << load_type;
      UNREACHABLE();
  }

  if (load_type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (load_type == DataType::Type::kReference) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      // Volatile load: forbid reordering of subsequent accesses before it.
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4783
4784void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4785 const FieldInfo& field_info) {
4786 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4787
4788 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004789 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004790 DataType::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004791 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004792 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004793 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004794
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004795 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004796 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004797 if (is_volatile) {
4798 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4799 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4800 } else {
4801 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4802 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004803 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004804 if (is_volatile) {
4805 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4806 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4807 } else {
4808 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4809 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004810 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004811 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004812 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004813 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004814 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004815 } else if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01004816 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004817 locations->AddTemp(Location::RequiresRegister());
4818 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004819}
4820
// Emits the store for an instance or static field set.
// InAt(0) holds the base (object or Class), InAt(1) the value (register or
// constant). `value_can_be_null` lets the write barrier skip its null check.
// Ordering is significant: AnyStore barrier (if volatile) -> store ->
// implicit-null-check record -> GC card mark -> AnyAny barrier (if volatile).
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  DataType::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when a helper (MoveInt64ToAddress) already recorded the implicit
  // null check, so it must not be recorded twice below.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      if (value.IsConstant()) {
        __ movb(Address(base, offset),
                Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      if (value.IsConstant()) {
        __ movw(Address(base, offset),
                Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == DataType::Type::kReference` implies `v == 0`.
        DCHECK((field_type != DataType::Type::kReference) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
          // Poison a copy in a temp so the original value register is not
          // clobbered (it is still needed for the write barrier below).
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float as a 32-bit immediate.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double via the 64-bit move helper.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object so the write is tracked by the GC.
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4946
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance field stores share the common field-set location logic.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4950
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance field stores share the common field-set code generation.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4954
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance field loads share the common field-get location logic.
  HandleFieldGet(instruction);
}
4958
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance field loads share the common field-get code generation.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004962
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field loads share the common field-get location logic.
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004966
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field loads share the common field-get code generation.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004970
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field stores share the common field-set location logic.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004974
void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field stores share the common field-set code generation.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4978
void LocationsBuilderX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  // The resulting String is returned in RAX by the runtime entrypoint.
  codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(RAX));
}
4982
void InstructionCodeGeneratorX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  // Pass the append format flags in RDI, then call the runtime entrypoint
  // that performs the actual StringBuilder append.
  __ movl(CpuRegister(RDI), Immediate(instruction->GetFormat()->GetValue()));
  codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
}
4987
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Unresolved fields are accessed through a runtime call, so operands follow
  // the field-access runtime calling convention.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4994
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Emit the runtime call that resolves and reads the field.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5004
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Unresolved fields are accessed through a runtime call, so operands follow
  // the field-access runtime calling convention.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5011
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Emit the runtime call that resolves and writes the field.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5021
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Unresolved fields are accessed through a runtime call, so operands follow
  // the field-access runtime calling convention.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5028
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Emit the runtime call that resolves and reads the static field.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5038
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Unresolved fields are accessed through a runtime call, so operands follow
  // the field-access runtime calling convention.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5045
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Emit the runtime call that resolves and writes the static field.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5055
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005056void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005057 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5058 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5059 ? Location::RequiresRegister()
5060 : Location::Any();
5061 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005062}
5063
void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    // A following memory access on the same object will serve as the check;
    // no instruction is needed here.
    return;
  }
  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  // Touch the first word of the object: a null reference faults here, and
  // the recorded PC lets the runtime map the fault back to this null check.
  // TEST only reads memory, so RAX's value is irrelevant and unmodified.
  __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
  RecordPcInfo(instruction, instruction->GetDexPc());
}
5074
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  // Compare the object against null and branch to the throwing slow path on
  // equality. The comparison form depends on where the object lives.
  SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // test reg, reg sets ZF iff the reference is null (shorter than cmp).
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // A constant object here can only be the null constant: the check always
    // fails, so jump unconditionally to the slow path.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
5094
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  // Delegates to the code generator, which emits the implicit (fault-based)
  // or explicit (compare-and-branch) variant per compiler options.
  codegen_->GenerateNullCheck(instruction);
}
5098
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005099void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005100 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005101 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005102 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005103 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5104 object_array_get_with_read_barrier
5105 ? LocationSummary::kCallOnSlowPath
5106 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005107 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005108 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005109 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005110 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04005111 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005112 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005113 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5114 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005115 // The output overlaps for an object array get when read barriers
5116 // are enabled: we do not want the move to overwrite the array's
5117 // location, as we need it to emit the read barrier.
5118 locations->SetOut(
5119 Location::RequiresRegister(),
5120 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005121 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005122}
5123
// Generates code for HArrayGet: loads one array element of the instruction's
// type into the output location. Reference loads may go through a read
// barrier (Baker fast path or slow path); char loads from compressed strings
// branch between byte-sized and char-sized element accesses.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  DataType::Type type = instruction->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kInt8: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kUint16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // Branch cases into compressed and uncompressed for each index's type.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        NearLabel done, not_compressed;
        // The compression state lives in the least significant bit of the
        // count field (see the static_assert below).
        __ testb(Address(obj, count_offset), Immediate(1));
        // The count-field access above doubles as the null check.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ j(kNotZero, &not_compressed);
        __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
        __ jmp(&done);
        __ Bind(&not_compressed);
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
        __ Bind(&done);
      } else {
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      }
      break;
    }

    case DataType::Type::kInt16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case DataType::Type::kInt32: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kFloat32: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kFloat64: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
5243
5244void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005245 DataType::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005246
5247 bool needs_write_barrier =
5248 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005249 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005250
Vladimir Markoca6fff82017-10-03 14:49:14 +01005251 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005252 instruction,
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005253 needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005254
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005255 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04005256 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005257 if (DataType::IsFloatingPointType(value_type)) {
Mark Mendellea5af682015-10-22 17:35:49 -04005258 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005259 } else {
5260 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5261 }
5262
5263 if (needs_write_barrier) {
5264 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01005265 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005266 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005267 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005268}
5269
// Generates code for HArraySet: stores a value into an array element. For
// reference stores this additionally emits, as needed: a fast path for a null
// value, a type check with an ArraySet slow path, a GC card mark, and
// reference poisoning (when heap poisoning is enabled).
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool needs_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // A null store needs neither a write barrier nor a type check.
        DCHECK(!needs_write_barrier);
        DCHECK(!needs_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();

      // When the value may be null, skip the type check and card mark for a
      // null value and jump straight to the store.
      bool can_value_be_null = instruction->GetValueCanBeNull();
      NearLabel do_store;
      if (can_value_be_null) {
        __ testl(register_value, register_value);
        __ j(kEqual, &do_store);
      }

      SlowPathCode* slow_path = nullptr;
      if (needs_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);

        const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
        const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
        const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers. This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // For Object[] arrays, a value whose class directly extends
          // Object is still assignable; only otherwise take the slow path.
          NearLabel do_put;
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      // The null case branches past this point, so the value is known to be
      // non-null here.
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), /* value_can_be_null= */ false);

      if (can_value_be_null) {
        DCHECK(do_store.IsLinked());
        __ Bind(&do_store);
      }

      Location source = value;
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        source = temp_loc;
      }

      __ movl(address, source.AsRegister<CpuRegister>());

      // When the type-check instructions may have been skipped (nullable
      // value) or were never emitted, the store itself must serve as the
      // implicit null check; otherwise the class load above already did.
      if (can_value_be_null || !needs_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the float constant via its bit pattern as an integer move.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the double constant via its bit pattern, possibly as two
        // 32-bit halves.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5479
5480void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005481 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005482 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005483 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005484 if (!instruction->IsEmittedAtUseSite()) {
5485 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5486 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005487}
5488
5489void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005490 if (instruction->IsEmittedAtUseSite()) {
5491 return;
5492 }
5493
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005494 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005495 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005496 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5497 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005498 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005499 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005500 // Mask out most significant bit in case the array is String's array of char.
5501 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005502 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005503 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005504}
5505
5506void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005507 RegisterSet caller_saves = RegisterSet::Empty();
5508 InvokeRuntimeCallingConvention calling_convention;
5509 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5510 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5511 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005512 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005513 HInstruction* length = instruction->InputAt(1);
5514 if (!length->IsEmittedAtUseSite()) {
5515 locations->SetInAt(1, Location::RegisterOrConstant(length));
5516 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005517}
5518
// Generates code for HBoundsCheck: compares the index against the array
// length and branches to a throwing slow path when the index is out of
// range. Constant operands are folded where possible, and the length may be
// read straight from the array object when the ArrayLength is emitted at
// its use site.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known to fail: jump unconditionally to the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    // Unsigned comparison also catches a negative index.
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        // Drop the compression flag bit to get the character count.
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // The comparison is length vs. index, so out of range is length <= index
    // (the unsigned condition also catches a negative index).
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5580
5581void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5582 CpuRegister card,
5583 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005584 CpuRegister value,
5585 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005586 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005587 if (value_can_be_null) {
5588 __ testl(value, value);
5589 __ j(kEqual, &is_null);
5590 }
Roland Levillainc73f0522018-08-14 15:16:50 +01005591 // Load the address of the card table into `card`.
Andreas Gampe542451c2016-07-26 09:02:02 -07005592 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
Andreas Gampe3db70682018-12-26 15:12:03 -08005593 /* no_rip= */ true));
Roland Levillainc73f0522018-08-14 15:16:50 +01005594 // Calculate the offset (in the card table) of the card corresponding to
5595 // `object`.
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005596 __ movq(temp, object);
5597 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillainc73f0522018-08-14 15:16:50 +01005598 // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
5599 // `object`'s card.
5600 //
5601 // Register `card` contains the address of the card table. Note that the card
5602 // table's base is biased during its creation so that it always starts at an
5603 // address whose least-significant byte is equal to `kCardDirty` (see
5604 // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
5605 // below writes the `kCardDirty` (byte) value into the `object`'s card
5606 // (located at `card + object >> kCardShift`).
5607 //
5608 // This dual use of the value in register `card` (1. to calculate the location
5609 // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
5610 // (no need to explicitly load `kCardDirty` as an immediate value).
Roland Levillain4d027112015-07-01 15:41:14 +01005611 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005612 if (value_can_be_null) {
5613 __ Bind(&is_null);
5614 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005615}
5616
// Parallel moves carry no locations of their own, so reaching this visitor
// indicates a compiler bug.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5620
5621void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005622 if (instruction->GetNext()->IsSuspendCheck() &&
5623 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5624 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5625 // The back edge will generate the suspend check.
5626 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5627 }
5628
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005629 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5630}
5631
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005632void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005633 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5634 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005635 // In suspend check slow path, usually there are no caller-save registers at all.
5636 // If SIMD instructions are present, however, we force spilling all live SIMD
5637 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005638 locations->SetCustomSlowPathCallerSaves(
5639 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005640}
5641
5642void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005643 HBasicBlock* block = instruction->GetBlock();
5644 if (block->GetLoopInformation() != nullptr) {
5645 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5646 // The back edge will generate the suspend check.
5647 return;
5648 }
5649 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5650 // The goto will generate the suspend check.
5651 return;
5652 }
5653 GenerateSuspendCheck(instruction, nullptr);
5654}
5655
// Emits the inline fast path of a suspend check: a 16-bit compare of the
// current thread's flags word (addressed gs-relative, no RIP addressing)
// against zero, branching to a (possibly shared) slow path when any flag is
// set. `successor` is non-null for a loop back-edge check, in which case the
// fast path jumps straight to the loop header when no flags are set.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse the slow path already attached to this instruction (a back edge may
  // be visited after the check was first materialized); otherwise create one.
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip= */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Standalone check: enter the slow path if flags are set, then fall through.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge check: skip the slow path entirely when no flags are set.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5683
// Returns the assembler of the owning code generator; the resolver emits all
// of its moves through it (via the `__` macro).
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5687
5688void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005689 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005690 Location source = move->GetSource();
5691 Location destination = move->GetDestination();
5692
5693 if (source.IsRegister()) {
5694 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005695 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005696 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005697 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005698 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005699 } else {
5700 DCHECK(destination.IsDoubleStackSlot());
5701 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005702 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005703 }
5704 } else if (source.IsStackSlot()) {
5705 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005706 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005707 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005708 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005709 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005710 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005711 } else {
5712 DCHECK(destination.IsStackSlot());
5713 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5714 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5715 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005716 } else if (source.IsDoubleStackSlot()) {
5717 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005718 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005719 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005720 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005721 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5722 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005723 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005724 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005725 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5726 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5727 }
Aart Bik5576f372017-03-23 16:17:37 -07005728 } else if (source.IsSIMDStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08005729 if (destination.IsFpuRegister()) {
5730 __ movups(destination.AsFpuRegister<XmmRegister>(),
5731 Address(CpuRegister(RSP), source.GetStackIndex()));
5732 } else {
5733 DCHECK(destination.IsSIMDStackSlot());
5734 size_t high = kX86_64WordSize;
5735 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5736 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5737 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex() + high));
5738 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex() + high), CpuRegister(TMP));
5739 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005740 } else if (source.IsConstant()) {
5741 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005742 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5743 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005744 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005745 if (value == 0) {
5746 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5747 } else {
5748 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5749 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005750 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005751 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005752 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005753 }
5754 } else if (constant->IsLongConstant()) {
5755 int64_t value = constant->AsLongConstant()->GetValue();
5756 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005757 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005758 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005759 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005760 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005761 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005762 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005763 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005764 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005765 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005766 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005767 } else {
5768 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005769 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005770 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5771 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005772 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005773 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005774 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005775 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005776 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005777 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005778 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005779 } else {
5780 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005781 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005782 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005783 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005784 } else if (source.IsFpuRegister()) {
5785 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005786 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005787 } else if (destination.IsStackSlot()) {
5788 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005789 source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005790 } else if (destination.IsDoubleStackSlot()) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005791 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005792 source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005793 } else {
5794 DCHECK(destination.IsSIMDStackSlot());
5795 __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
5796 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005797 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005798 }
5799}
5800
// Swaps the low 32 bits of a core register with a 32-bit stack slot at
// RSP + mem, using TMP as the scratch register.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
5806
// Swaps two 64-bit core registers through TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5812
// Swaps a 64-bit core register with a 64-bit stack slot at RSP + mem,
// using TMP as the scratch register.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
5818
// Swaps the low 32 bits of an XMM register with a 32-bit stack slot:
// slot -> TMP, register -> slot (movss), then TMP -> register (movd).
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5824
// Swaps the low 64 bits of an XMM register with a 64-bit stack slot:
// slot -> TMP, register -> slot (movsd), then TMP -> register (movd).
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5830
Aart Bikcfe50bb2017-12-12 14:54:12 -08005831void ParallelMoveResolverX86_64::Exchange128(XmmRegister reg, int mem) {
5832 size_t extra_slot = 2 * kX86_64WordSize;
5833 __ subq(CpuRegister(RSP), Immediate(extra_slot));
5834 __ movups(Address(CpuRegister(RSP), 0), XmmRegister(reg));
5835 ExchangeMemory64(0, mem + extra_slot, 2);
5836 __ movups(XmmRegister(reg), Address(CpuRegister(RSP), 0));
5837 __ addq(CpuRegister(RSP), Immediate(extra_slot));
5838}
5839
// Swaps two 32-bit stack slots using TMP plus one scratch core register.
// If the scratch register had to be spilled (pushed), slot offsets are
// adjusted by one word to account for the moved RSP.
void ParallelMoveResolverX86_64::ExchangeMemory32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5852
// Swaps `num_of_qwords` consecutive quadwords between two stack areas, one
// quadword at a time, using TMP plus one scratch core register. As in
// ExchangeMemory32, a spilled scratch register shifts the slot offsets by
// one word.
void ParallelMoveResolverX86_64::ExchangeMemory64(int mem1, int mem2, int num_of_qwords) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;

  // Now that temp registers are available (possibly spilled), exchange blocks of memory.
  for (int i = 0; i < num_of_qwords; i++) {
    __ movq(CpuRegister(TMP),
            Address(CpuRegister(RSP), mem1 + stack_offset));
    __ movq(CpuRegister(ensure_scratch.GetRegister()),
            Address(CpuRegister(RSP), mem2 + stack_offset));
    __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),
            CpuRegister(TMP));
    __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
            CpuRegister(ensure_scratch.GetRegister()));
    stack_offset += kX86_64WordSize;
  }
}
5872
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005873void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005874 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005875 Location source = move->GetSource();
5876 Location destination = move->GetDestination();
5877
5878 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005879 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005880 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005881 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005882 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005883 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005884 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08005885 ExchangeMemory32(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005886 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005887 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005888 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005889 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005890 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08005891 ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 1);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005892 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005893 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5894 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5895 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005896 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005897 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005898 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005899 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005900 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005901 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005902 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005903 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Aart Bikcfe50bb2017-12-12 14:54:12 -08005904 } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
5905 ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 2);
5906 } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
5907 Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
5908 } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
5909 Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005910 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005911 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005912 }
5913}
5914
5915
// Spills a scratch core register by pushing it on the stack.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5919
5920
// Restores a scratch core register previously saved by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5924
// Emits the fast path of a class initialization check: compare the status
// byte of the Class object in `class_reg` against the shifted
// kVisiblyInitialized value and enter `slow_path` if the status is below it.
// The status bits live above the SubtypeCheckBits in the 32-bit status field,
// so the byte offset and in-byte shift are derived from that bit position.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_visibly_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << (status_lsb_position % kBitsPerByte);

  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_visibly_initialized_value));
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5937
// Emits the comparison for a bitstring-based type check. `temp` holds the
// Class pointer on entry. For a 16-bit mask the path-to-root is compared
// directly against memory; otherwise the status word is loaded and compared
// via SUB + SHL so that only the masked bits affect the resulting flags.
// Note: the condition flags, not a register value, carry the result.
void InstructionCodeGeneratorX86_64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       CpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Compare the bitstring in memory.
    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
    // Compare the bitstring bits using SUB.
    __ subl(temp, Immediate(path_to_root));
    // Shift out bits that do not contribute to the comparison.
    __ shll(temp, Immediate(32u - mask_bits));
  }
}
5957
// Returns the load kind actually used for an HLoadClass. On x86-64 every
// requested kind is supported, so this only validates that the kind is
// consistent with the compilation mode (AOT-only kinds vs JIT-only kinds)
// and returns the request unchanged.
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
      // PC-relative/patch-based kinds are only valid when compiling AOT.
      DCHECK(!GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      // Address-based kinds are only valid when JIT compiling.
      DCHECK(GetCompilerOptions().IsJitCompiler());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}
5980
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005981void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00005982 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005983 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00005984 // Custom calling convention: RAX serves as both input and output.
Vladimir Marko41559982017-01-06 14:04:23 +00005985 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005986 cls,
Vladimir Markoea4c1262017-02-06 19:59:33 +00005987 Location::RegisterLocation(RAX),
Vladimir Marko41559982017-01-06 14:04:23 +00005988 Location::RegisterLocation(RAX));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005989 return;
5990 }
Vladimir Marko41559982017-01-06 14:04:23 +00005991 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005992
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005993 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
5994 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005995 ? LocationSummary::kCallOnSlowPath
5996 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005997 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005998 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005999 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006000 }
6001
Vladimir Marko41559982017-01-06 14:04:23 +00006002 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006003 locations->SetInAt(0, Location::RequiresRegister());
6004 }
6005 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006006 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
6007 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6008 // Rely on the type resolution and/or initialization to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006009 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006010 } else {
6011 // For non-Baker read barrier we have a temp-clobbering call.
6012 }
6013 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006014}
6015
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006016Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006017 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006018 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006019 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006020 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006021 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006022 PatchInfo<Label>* info = &jit_class_patches_.back();
6023 return &info->label;
6024}
6025
// Emits the code that materializes a class reference into the output
// register, dispatching on the load kind: current method's declaring class,
// PC-relative boot-image address, boot-image .data.bimg.rel.ro entry, .bss
// entry, JIT-known address, or JIT root table entry. A slow path is added
// when the entry may be null (.bss) or the class may need initialization.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes are non-moveable, so no read barrier is needed.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label= */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // lea with a dummy offset; the linker patches in the real displacement.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(cls));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // No need for memory fence, thanks to the x86-64 memory model.
      // The .bss slot may still be null (unresolved): check below.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6118
// Allocates the location summary for an explicit class initialization check:
// the class in any register, the (optional) output aliasing that input, and
// a slow path call.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
6129
// HLoadMethodHandle is always a runtime call on x86-64.
void LocationsBuilderX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  // Custom calling convention: RAX serves as both input and output.
  Location location = Location::RegisterLocation(RAX);
  CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
}
6135
// Loads a MethodHandle by delegating to the shared runtime-call implementation.
void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6139
Orion Hodson18259d72018-04-12 11:18:23 +01006140void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
6141 // Custom calling convention: RAX serves as both input and output.
6142 Location location = Location::RegisterLocation(RAX);
6143 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
6144}
6145
// Loads a MethodType by delegating to the shared runtime-call implementation.
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6149
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006150void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006151 // We assume the class to not be null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01006152 SlowPathCode* slow_path =
6153 new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(check->GetLoadClass(), check);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006154 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00006155 GenerateClassInitializationCheck(slow_path,
6156 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006157}
6158
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006159HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
6160 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006161 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006162 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006163 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00006164 case HLoadString::LoadKind::kBssEntry:
Vladimir Marko695348f2020-05-19 14:42:02 +01006165 DCHECK(!GetCompilerOptions().IsJitCompiler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006166 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006167 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006168 case HLoadString::LoadKind::kJitTableAddress:
Vladimir Marko695348f2020-05-19 14:42:02 +01006169 DCHECK(GetCompilerOptions().IsJitCompiler());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006170 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006171 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006172 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006173 }
6174 return desired_string_load_kind;
6175}
6176
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006177void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006178 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006179 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006180 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07006181 locations->SetOut(Location::RegisterLocation(RAX));
6182 } else {
6183 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006184 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
6185 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00006186 // Rely on the pResolveString to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006187 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006188 } else {
6189 // For non-Baker read barrier we have a temp-clobbering call.
6190 }
6191 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006192 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006193}
6194
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006195Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006196 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006197 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006198 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006199 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006200 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006201 PatchInfo<Label>* info = &jit_string_patches_.back();
6202 return &info->label;
6203}
6204
// Emits the code that materializes a String reference into the output register,
// according to the load kind selected at locations-build time. Kinds handled
// inline return early; anything else falls through to the runtime call.
//
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      // PC-relative lea with a dummy displacement, fixed up by the linker.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageStringPatch(load);
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the reference through the .data.bimg.rel.ro entry (patched below).
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(load));
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::String> */ out = *address  /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // No need for memory fence, thanks to the x86-64 memory model.
      // A null .bss entry means the string is not resolved yet: go resolve it.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitBootImageAddress: {
      // The string lives in the (non-moving) boot image: embed its address.
      uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6267
David Brazdilcb1c0552015-08-04 16:22:25 +01006268static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006269 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Andreas Gampe3db70682018-12-26 15:12:03 -08006270 /* no_rip= */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01006271}
6272
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006273void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
6274 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006275 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006276 locations->SetOut(Location::RequiresRegister());
6277}
6278
// Reads the pending exception from the thread-local slot into the output register.
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
6282
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  // No inputs, no outputs, no runtime call; the summary is still required.
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6286
// Clears the pending exception by storing null into the thread-local slot.
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
6290
6291void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006292 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6293 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006294 InvokeRuntimeCallingConvention calling_convention;
6295 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6296}
6297
// Delivers the exception (already placed in the runtime calling convention's
// first argument register by the locations builder) via the runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6302
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006303// Temp is used for read barrier.
6304static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6305 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006306 !kUseBakerReadBarrier &&
6307 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006308 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006309 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6310 return 1;
6311 }
6312 return 0;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006313}
6314
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006315// Interface case has 2 temps, one for holding the number of interfaces, one for the current
6316// interface pointer, the current interface is compared in memory.
6317// The other checks have one temp for loading the object's class.
6318static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6319 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6320 return 2;
6321 }
6322 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006323}
6324
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006325void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006326 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006327 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01006328 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006329 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006330 case TypeCheckKind::kExactCheck:
6331 case TypeCheckKind::kAbstractClassCheck:
6332 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00006333 case TypeCheckKind::kArrayObjectCheck: {
6334 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
6335 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
6336 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006337 break;
Vladimir Marko87584542017-12-12 17:47:52 +00006338 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006339 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006340 case TypeCheckKind::kUnresolvedCheck:
6341 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006342 call_kind = LocationSummary::kCallOnSlowPath;
6343 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006344 case TypeCheckKind::kBitstringCheck:
6345 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006346 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006347
Vladimir Markoca6fff82017-10-03 14:49:14 +01006348 LocationSummary* locations =
6349 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01006350 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006351 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006352 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006353 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00006354 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
6355 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
6356 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
6357 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
6358 } else {
6359 locations->SetInAt(1, Location::Any());
6360 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006361 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
6362 locations->SetOut(Location::RequiresRegister());
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006363 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006364}
6365
// Emits the instanceof check. The object under test is input 0 and the target
// class input 1 (constants for bitstring checks); the boolean result is
// materialized in `out`. Depending on the TypeCheckKind this is an inline
// comparison, an inline loop over the class hierarchy, or a branch to a
// TypeCheckSlowPathX86_64. The shared `zero`/`done` labels at the bottom are
// only bound when some path above actually linked them.
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1u) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        // No null check was emitted, so the result can be set branchlessly.
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, out);
      if (zero.IsLinked()) {
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }
  }

  // Shared tail: result 0, then the common exit.
  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ xorl(out, out);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
6621
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006622void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006623 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00006624 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006625 LocationSummary* locations =
6626 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006627 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006628 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6629 // Require a register for the interface check since there is a loop that compares the class to
6630 // a memory address.
6631 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00006632 } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
6633 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
6634 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
6635 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006636 } else {
6637 locations->SetInAt(1, Location::Any());
6638 }
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006639 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
6640 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006641}
6642
6643void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006644 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006645 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006646 Location obj_loc = locations->InAt(0);
6647 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006648 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006649 Location temp_loc = locations->GetTemp(0);
6650 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006651 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
6652 DCHECK_GE(num_temps, 1u);
6653 DCHECK_LE(num_temps, 2u);
6654 Location maybe_temp2_loc = (num_temps >= 2u) ? locations->GetTemp(1) : Location::NoLocation();
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006655 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6656 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6657 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6658 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6659 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6660 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006661 const uint32_t object_array_data_offset =
6662 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006663
Vladimir Marko87584542017-12-12 17:47:52 +00006664 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006665 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006666 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6667 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006668 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006669
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006670
6671 NearLabel done;
6672 // Avoid null check if we know obj is not null.
6673 if (instruction->MustDoNullCheck()) {
6674 __ testl(obj, obj);
6675 __ j(kEqual, &done);
6676 }
6677
Roland Levillain0d5a2812015-11-13 10:07:31 +00006678 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006679 case TypeCheckKind::kExactCheck:
6680 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006681 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006682 GenerateReferenceLoadTwoRegisters(instruction,
6683 temp_loc,
6684 obj_loc,
6685 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006686 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006687 if (cls.IsRegister()) {
6688 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6689 } else {
6690 DCHECK(cls.IsStackSlot()) << cls;
6691 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6692 }
6693 // Jump to slow path for throwing the exception or doing a
6694 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006695 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006696 break;
6697 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006698
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006699 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006700 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006701 GenerateReferenceLoadTwoRegisters(instruction,
6702 temp_loc,
6703 obj_loc,
6704 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006705 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006706 // If the class is abstract, we eagerly fetch the super class of the
6707 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006708 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006709 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006710 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006711 GenerateReferenceLoadOneRegister(instruction,
6712 temp_loc,
6713 super_offset,
6714 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006715 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006716
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006717 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6718 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006719 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006720 // Otherwise, compare the classes.
6721 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006722 if (cls.IsRegister()) {
6723 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6724 } else {
6725 DCHECK(cls.IsStackSlot()) << cls;
6726 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6727 }
6728 __ j(kNotEqual, &loop);
6729 break;
6730 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006731
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006732 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006733 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006734 GenerateReferenceLoadTwoRegisters(instruction,
6735 temp_loc,
6736 obj_loc,
6737 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006738 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006739 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006740 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006741 __ Bind(&loop);
6742 if (cls.IsRegister()) {
6743 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6744 } else {
6745 DCHECK(cls.IsStackSlot()) << cls;
6746 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6747 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006748 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006749
Roland Levillain0d5a2812015-11-13 10:07:31 +00006750 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006751 GenerateReferenceLoadOneRegister(instruction,
6752 temp_loc,
6753 super_offset,
6754 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006755 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006756
6757 // If the class reference currently in `temp` is not null, jump
6758 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006759 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006760 __ j(kNotZero, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006761 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006762 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006763 break;
6764 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006765
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006766 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006767 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006768 GenerateReferenceLoadTwoRegisters(instruction,
6769 temp_loc,
6770 obj_loc,
6771 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006772 kWithoutReadBarrier);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006773 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006774 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006775 if (cls.IsRegister()) {
6776 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6777 } else {
6778 DCHECK(cls.IsStackSlot()) << cls;
6779 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6780 }
6781 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006782
6783 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006784 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006785 GenerateReferenceLoadOneRegister(instruction,
6786 temp_loc,
6787 component_offset,
6788 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006789 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006790
6791 // If the component type is not null (i.e. the object is indeed
6792 // an array), jump to label `check_non_primitive_component_type`
6793 // to further check that this component type is not a primitive
6794 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006795 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006796 // Otherwise, jump to the slow path to throw the exception.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006797 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006798 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006799 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006800 break;
6801 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006802
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006803 case TypeCheckKind::kUnresolvedCheck: {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006804 // We always go into the type check slow path for the unresolved case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006805 //
6806 // We cannot directly call the CheckCast runtime entry point
6807 // without resorting to a type checking slow path here (i.e. by
6808 // calling InvokeRuntime directly), as it would require to
6809 // assign fixed registers for the inputs of this HInstanceOf
6810 // instruction (following the runtime calling convention), which
6811 // might be cluttered by the potential first read barrier
6812 // emission at the beginning of this method.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006813 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006814 break;
6815 }
6816
Vladimir Marko175e7862018-03-27 09:03:13 +00006817 case TypeCheckKind::kInterfaceCheck: {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006818 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
6819 // We can not get false positives by doing this.
6820 // /* HeapReference<Class> */ temp = obj->klass_
6821 GenerateReferenceLoadTwoRegisters(instruction,
6822 temp_loc,
6823 obj_loc,
6824 class_offset,
6825 kWithoutReadBarrier);
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006826
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006827 // /* HeapReference<Class> */ temp = temp->iftable_
6828 GenerateReferenceLoadTwoRegisters(instruction,
6829 temp_loc,
6830 temp_loc,
6831 iftable_offset,
6832 kWithoutReadBarrier);
6833 // Iftable is never null.
6834 __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
6835 // Maybe poison the `cls` for direct comparison with memory.
6836 __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
6837 // Loop through the iftable and check if any class matches.
6838 NearLabel start_loop;
6839 __ Bind(&start_loop);
6840 // Need to subtract first to handle the empty array case.
6841 __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
6842 __ j(kNegative, type_check_slow_path->GetEntryLabel());
6843 // Go to next interface if the classes do not match.
6844 __ cmpl(cls.AsRegister<CpuRegister>(),
6845 CodeGeneratorX86_64::ArrayAddress(temp,
6846 maybe_temp2_loc,
6847 TIMES_4,
6848 object_array_data_offset));
6849 __ j(kNotEqual, &start_loop); // Return if same class.
6850 // If `cls` was poisoned above, unpoison it.
6851 __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006852 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006853 }
6854
6855 case TypeCheckKind::kBitstringCheck: {
6856 // /* HeapReference<Class> */ temp = obj->klass_
6857 GenerateReferenceLoadTwoRegisters(instruction,
6858 temp_loc,
6859 obj_loc,
6860 class_offset,
6861 kWithoutReadBarrier);
6862
6863 GenerateBitstringTypeCheckCompare(instruction, temp);
6864 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
6865 break;
6866 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006867 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006868
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006869 if (done.IsLinked()) {
6870 __ Bind(&done);
6871 }
6872
Roland Levillain0d5a2812015-11-13 10:07:31 +00006873 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006874}
6875
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006876void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006877 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6878 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006879 InvokeRuntimeCallingConvention calling_convention;
6880 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6881}
6882
6883void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006884 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006885 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006886 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006887 if (instruction->IsEnter()) {
6888 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6889 } else {
6890 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6891 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006892}
6893
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05306894void LocationsBuilderX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6895 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6896 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6897 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6898 locations->SetInAt(0, Location::RequiresRegister());
6899 // There is no immediate variant of negated bitwise and in X86.
6900 locations->SetInAt(1, Location::RequiresRegister());
6901 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6902}
6903
6904void LocationsBuilderX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6905 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6906 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6907 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6908 locations->SetInAt(0, Location::RequiresRegister());
6909 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6910}
6911
6912void InstructionCodeGeneratorX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6913 LocationSummary* locations = instruction->GetLocations();
6914 Location first = locations->InAt(0);
6915 Location second = locations->InAt(1);
6916 Location dest = locations->Out();
6917 __ andn(dest.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
6918}
6919
6920void InstructionCodeGeneratorX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6921 LocationSummary* locations = instruction->GetLocations();
6922 Location src = locations->InAt(0);
6923 Location dest = locations->Out();
6924 switch (instruction->GetOpKind()) {
6925 case HInstruction::kAnd:
6926 __ blsr(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6927 break;
6928 case HInstruction::kXor:
6929 __ blsmsk(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6930 break;
6931 default:
6932 LOG(FATAL) << "Unreachable";
6933 }
6934}
6935
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006936void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6937void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6938void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6939
6940void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6941 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006942 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006943 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
6944 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006945 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006946 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006947 locations->SetOut(Location::SameAsFirstInput());
6948}
6949
6950void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6951 HandleBitwiseOperation(instruction);
6952}
6953
6954void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6955 HandleBitwiseOperation(instruction);
6956}
6957
6958void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6959 HandleBitwiseOperation(instruction);
6960}
6961
6962void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6963 LocationSummary* locations = instruction->GetLocations();
6964 Location first = locations->InAt(0);
6965 Location second = locations->InAt(1);
6966 DCHECK(first.Equals(locations->Out()));
6967
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006968 if (instruction->GetResultType() == DataType::Type::kInt32) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006969 if (second.IsRegister()) {
6970 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006971 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006972 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006973 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006974 } else {
6975 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006976 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006977 }
6978 } else if (second.IsConstant()) {
6979 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6980 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006981 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006982 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006983 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006984 } else {
6985 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006986 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006987 }
6988 } else {
6989 Address address(CpuRegister(RSP), second.GetStackIndex());
6990 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006991 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006992 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006993 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006994 } else {
6995 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006996 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006997 }
6998 }
6999 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007000 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007001 CpuRegister first_reg = first.AsRegister<CpuRegister>();
7002 bool second_is_constant = false;
7003 int64_t value = 0;
7004 if (second.IsConstant()) {
7005 second_is_constant = true;
7006 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007007 }
Mark Mendell40741f32015-04-20 22:10:34 -04007008 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007009
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007010 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007011 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04007012 if (is_int32_value) {
7013 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
7014 } else {
7015 __ andq(first_reg, codegen_->LiteralInt64Address(value));
7016 }
7017 } else if (second.IsDoubleStackSlot()) {
7018 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007019 } else {
7020 __ andq(first_reg, second.AsRegister<CpuRegister>());
7021 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007022 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007023 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04007024 if (is_int32_value) {
7025 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
7026 } else {
7027 __ orq(first_reg, codegen_->LiteralInt64Address(value));
7028 }
7029 } else if (second.IsDoubleStackSlot()) {
7030 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007031 } else {
7032 __ orq(first_reg, second.AsRegister<CpuRegister>());
7033 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007034 } else {
7035 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007036 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04007037 if (is_int32_value) {
7038 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
7039 } else {
7040 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
7041 }
7042 } else if (second.IsDoubleStackSlot()) {
7043 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007044 } else {
7045 __ xorq(first_reg, second.AsRegister<CpuRegister>());
7046 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007047 }
7048 }
7049}
7050
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007051void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
7052 HInstruction* instruction,
7053 Location out,
7054 uint32_t offset,
7055 Location maybe_temp,
7056 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007057 CpuRegister out_reg = out.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007058 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007059 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007060 if (kUseBakerReadBarrier) {
7061 // Load with fast path based Baker's read barrier.
7062 // /* HeapReference<Object> */ out = *(out + offset)
7063 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007064 instruction, out, out_reg, offset, /* needs_null_check= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007065 } else {
7066 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007067 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007068 // in the following move operation, as we will need it for the
7069 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00007070 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007071 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007072 // /* HeapReference<Object> */ out = *(out + offset)
7073 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007074 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007075 }
7076 } else {
7077 // Plain load with no read barrier.
7078 // /* HeapReference<Object> */ out = *(out + offset)
7079 __ movl(out_reg, Address(out_reg, offset));
7080 __ MaybeUnpoisonHeapReference(out_reg);
7081 }
7082}
7083
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007084void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
7085 HInstruction* instruction,
7086 Location out,
7087 Location obj,
7088 uint32_t offset,
7089 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007090 CpuRegister out_reg = out.AsRegister<CpuRegister>();
7091 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007092 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007093 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007094 if (kUseBakerReadBarrier) {
7095 // Load with fast path based Baker's read barrier.
7096 // /* HeapReference<Object> */ out = *(obj + offset)
7097 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007098 instruction, out, obj_reg, offset, /* needs_null_check= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007099 } else {
7100 // Load with slow path based read barrier.
7101 // /* HeapReference<Object> */ out = *(obj + offset)
7102 __ movl(out_reg, Address(obj_reg, offset));
7103 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
7104 }
7105 } else {
7106 // Plain load with no read barrier.
7107 // /* HeapReference<Object> */ out = *(obj + offset)
7108 __ movl(out_reg, Address(obj_reg, offset));
7109 __ MaybeUnpoisonHeapReference(out_reg);
7110 }
7111}
7112
// Loads the GC root stored at `address` into `root`, honoring
// `read_barrier_option`. If `fixup_label` is non-null it is bound immediately
// after the emitted load/lea — presumably so that the caller can attach a
// linker patch to that instruction's address operand; confirm at call sites.
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The 32-bit load above is only correct if a compressed reference, a
      // GC root and an int32_t all have the same size.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking= */ false);
      codegen_->AddSlowPath(slow_path);

      // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
      // The entrypoint slot is read through the GS segment (thread-local on x86-64).
      const int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
      __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip= */ true), Immediate(0));
      // The entrypoint is null when the GC is not marking; take the mark slow
      // path only when it is non-null.
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's: materialize the root's address and let the runtime
      // perform the read.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
7179
7180void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7181 Location ref,
7182 CpuRegister obj,
7183 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007184 bool needs_null_check) {
7185 DCHECK(kEmitCompilerReadBarrier);
7186 DCHECK(kUseBakerReadBarrier);
7187
7188 // /* HeapReference<Object> */ ref = *(obj + offset)
7189 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007190 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007191}
7192
7193void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7194 Location ref,
7195 CpuRegister obj,
7196 uint32_t data_offset,
7197 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007198 bool needs_null_check) {
7199 DCHECK(kEmitCompilerReadBarrier);
7200 DCHECK(kUseBakerReadBarrier);
7201
Roland Levillain3d312422016-06-23 13:53:42 +01007202 static_assert(
7203 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7204 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007205 // /* HeapReference<Object> */ ref =
7206 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007207 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007208 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007209}
7210
// Generates a reference load with a fast-path (Baker) read barrier:
// loads `*src` into `ref` and, when the holder object `obj` is gray,
// branches to a slow path that marks the loaded reference.
// - `needs_null_check`: record an implicit null check on the first emitted
//   memory access (the monitor load).
// - `always_update_field`: use the field-updating slow path
//   (ReadBarrierMarkAndUpdateFieldSlowPathX86_64), which requires the two
//   temp registers `temp1`/`temp2`.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  // Note: a single byte of the lock word is tested, so the gray bit must fit in that byte.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above is the first access to `obj`, so it is the faulting
    // instruction for an implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking= */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
7294
// Generates a slow-path read barrier for the heap reference `ref` loaded
// from `obj` (at `offset`, optionally indexed by `index`); the marked
// result ends up in `out`. Unconditionally jumps to the slow path.
void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* slow_path = new (GetScopedAllocator())
      ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
7321
// Emits a slow-path read barrier for `ref` when read barriers are enabled;
// otherwise only unpoisons `out` when heap poisoning is on. Baker read
// barriers must not reach this path (they are handled in the fast path).
void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                       Location out,
                                                       Location ref,
                                                       Location obj,
                                                       uint32_t offset,
                                                       Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
  }
}
7339
// Generates a slow-path read barrier for the GC root `root`; the marked
// result ends up in `out`. Unconditionally jumps to the slow path.
void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCode* slow_path =
      new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
7356
// HBoundType never reaches code generation; it is expected to be removed
// before register allocation, so hitting this visitor is a compiler bug.
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
7361
// HBoundType never reaches code generation; it is expected to be removed
// before register allocation, so hitting this visitor is a compiler bug.
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
7366
// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  // The switch value must be in a register.
  locations->SetInAt(0, Location::RequiresRegister());
  // Two temps, used by the jump-table variant of the codegen (offset
  // register and table base register).
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
7375
// Generates code for a packed switch. Small switches (up to
// kPackedSwitchJumpTableThreshold entries) are emitted as a cascade of
// compare/jump pairs; larger ones go through a RIP-relative jump table
// stored in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Biased switch: anything below the bias goes to the default block,
      // and subsequent "is below case N" checks use a signed compare.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below. With a zero bias, an unsigned
      // "below" compare also filters out negative values.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps: each cmpl settles two cases
    // (strictly-below and equal), so we advance two entries per iteration.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? (unsigned compare also rejects negative values)
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
7456
// HIntermediateAddress is not generated for x86-64; reaching this visitor
// is a compiler bug.
void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7461
// HIntermediateAddress is not generated for x86-64; reaching this visitor
// is a compiler bug.
void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7466
Aart Bikc5d47542016-01-27 17:00:35 -08007467void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
7468 if (value == 0) {
7469 __ xorl(dest, dest);
7470 } else {
7471 __ movl(dest, Immediate(value));
7472 }
7473}
7474
Mark Mendell92e83bf2015-05-07 11:25:03 -04007475void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
7476 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08007477 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007478 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00007479 } else if (IsUint<32>(value)) {
7480 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007481 __ movl(dest, Immediate(static_cast<int32_t>(value)));
7482 } else {
7483 __ movq(dest, Immediate(value));
7484 }
7485}
7486
Mark Mendell7c0b44f2016-02-01 10:08:35 -05007487void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
7488 if (value == 0) {
7489 __ xorps(dest, dest);
7490 } else {
7491 __ movss(dest, LiteralInt32Address(value));
7492 }
7493}
7494
7495void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
7496 if (value == 0) {
7497 __ xorpd(dest, dest);
7498 } else {
7499 __ movsd(dest, LiteralInt64Address(value));
7500 }
7501}
7502
7503void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
7504 Load32BitValue(dest, bit_cast<int32_t, float>(value));
7505}
7506
7507void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
7508 Load64BitValue(dest, bit_cast<int64_t, double>(value));
7509}
7510
Aart Bika19616e2016-02-01 18:57:58 -08007511void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
7512 if (value == 0) {
7513 __ testl(dest, dest);
7514 } else {
7515 __ cmpl(dest, Immediate(value));
7516 }
7517}
7518
7519void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
7520 if (IsInt<32>(value)) {
7521 if (value == 0) {
7522 __ testq(dest, dest);
7523 } else {
7524 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
7525 }
7526 } else {
7527 // Value won't fit in an int.
7528 __ cmpq(dest, LiteralInt64Address(value));
7529 }
7530}
7531
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007532void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
7533 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07007534 GenerateIntCompare(lhs_reg, rhs);
7535}
7536
7537void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007538 if (rhs.IsConstant()) {
7539 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007540 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007541 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007542 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007543 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007544 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007545 }
7546}
7547
7548void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
7549 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
7550 if (rhs.IsConstant()) {
7551 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
7552 Compare64BitValue(lhs_reg, value);
7553 } else if (rhs.IsDoubleStackSlot()) {
7554 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
7555 } else {
7556 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
7557 }
7558}
7559
7560Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
7561 Location index,
7562 ScaleFactor scale,
7563 uint32_t data_offset) {
7564 return index.IsConstant() ?
7565 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7566 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
7567}
7568
Mark Mendellcfa410b2015-05-25 16:02:44 -04007569void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
7570 DCHECK(dest.IsDoubleStackSlot());
7571 if (IsInt<32>(value)) {
7572 // Can move directly as an int32 constant.
7573 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
7574 Immediate(static_cast<int32_t>(value)));
7575 } else {
7576 Load64BitValue(CpuRegister(TMP), value);
7577 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
7578 }
7579}
7580
/**
 * Class to handle late fixup of offsets into constant area.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  // Allows subclasses (e.g. JumpTableRIPFixup) to retarget the fixup once
  // the final constant-area offset is known.
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) override {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
7608
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      // The -1 offset is a placeholder; the real offset is set by CreateJumpTable().
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Appends the jump table to the constant area and retargets this fixup at
  // it. Called from Finalize(), i.e. after all target labels are bound.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  // The switch instruction whose targets the table encodes.
  const HPackedSwitch* switch_instr_;
};
7645
// Finalizes code generation: emits the constant area (including any pending
// jump tables) after the generated code, then delegates to the base class.
void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables. Note: this must happen before AddConstantArea()
    // so the tables end up inside the emitted constant area.
    for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}
7666
Mark Mendellf55c3e02015-03-26 21:07:46 -04007667Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007668 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddDouble(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007669 return Address::RIP(fixup);
7670}
7671
7672Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007673 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddFloat(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007674 return Address::RIP(fixup);
7675}
7676
7677Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007678 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt32(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007679 return Address::RIP(fixup);
7680}
7681
7682Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007683 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt64(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007684 return Address::RIP(fixup);
7685}
7686
// TODO: trg as memory.
// Moves the call return value from its calling-convention location to `trg`.
// A void type requires an invalid (absent) target; a no-op if `trg` already
// is the return location.
void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, DataType::Type type) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, DataType::Type::kVoid);
    return;
  }

  DCHECK_NE(type, DataType::Type::kVoid);

  Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
  if (trg.Equals(return_loc)) {
    return;
  }

  // Let the parallel move resolver take care of all of this.
  HParallelMove parallel_move(GetGraph()->GetAllocator());
  parallel_move.AddMove(return_loc, trg, type, nullptr);
  GetMoveResolver()->EmitNativeCode(&parallel_move);
}
7706
Mark Mendell9c86b482015-09-18 13:36:07 -04007707Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7708 // Create a fixup to be used to create and address the jump table.
7709 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007710 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell9c86b482015-09-18 13:36:07 -04007711
7712 // We have to populate the jump tables.
7713 fixups_to_jump_tables_.push_back(table_fixup);
7714 return Address::RIP(table_fixup);
7715}
7716
// Stores the 64-bit constant `v` to memory. Values that fit a sign-extended
// 32-bit immediate use a single movq; wider values are written as two 32-bit
// halves (`addr_low` then `addr_high`). In both cases the implicit null
// check, if any, is recorded on the first store — the faulting instruction.
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Didn't fit in a register. Do it in pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
7734
// Patches a single JIT GC-root literal: writes the address of entry
// `index_in_table` of the roots table (`roots_data`) into the 32-bit
// immediate located at the label recorded in `info`.
void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const PatchInfo<Label>& info,
                                          uint64_t index_in_table) const {
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  // The literal is not guaranteed to be 4-byte aligned within the code,
  // hence the unaligned store type.
  using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}
7746
// Patches every recorded JIT string and class root literal with the address
// of its slot in the JIT roots table.
void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const PatchInfo<Label>& info : jit_string_patches_) {
    StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }

  for (const PatchInfo<Label>& info : jit_class_patches_) {
    TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }
}
7760
Shalini Salomi Bodapatib45a4352019-07-10 16:09:41 +05307761bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
7762 return codegen_->GetInstructionSetFeatures().HasAVX();
7763}
7764
7765bool LocationsBuilderX86_64::CpuHasAvx2FeatureFlag() {
7766 return codegen_->GetInstructionSetFeatures().HasAVX2();
7767}
7768
7769bool InstructionCodeGeneratorX86_64::CpuHasAvxFeatureFlag() {
7770 return codegen_->GetInstructionSetFeatures().HasAVX();
7771}
7772
7773bool InstructionCodeGeneratorX86_64::CpuHasAvx2FeatureFlag() {
7774 return codegen_->GetInstructionSetFeatures().HasAVX2();
7775}
7776
Roland Levillain4d027112015-07-01 15:41:14 +01007777#undef __
7778
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007779} // namespace x86_64
7780} // namespace art