blob: 48a3d90f6f76277a33a8817cc7635bc1271aab50 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000019#include "art_method-inl.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010020#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010021#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000022#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010023#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010024#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010025#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070026#include "heap_poisoning.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080027#include "intrinsics.h"
28#include "intrinsics_x86_64.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000029#include "jit/profiling_info.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010030#include "linker/linker_patch.h"
Andreas Gamped4901292017-05-30 18:41:34 -070031#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070032#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070033#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010034#include "mirror/object_reference.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000035#include "scoped_thread_state_change-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010036#include "thread.h"
37#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010038#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010039#include "utils/x86_64/assembler_x86_64.h"
40#include "utils/x86_64/managed_register_x86_64.h"
41
Vladimir Marko0a516052019-10-14 13:00:44 +000042namespace art {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010043
Roland Levillain0d5a2812015-11-13 10:07:31 +000044template<class MirrorType>
45class GcRoot;
46
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010047namespace x86_64 {
48
// Offset of the current ArtMethod* within the managed stack frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry, per the managed calling convention.
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Core and FP registers preserved across calls by this code generator.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// NOTE(review): presumably the x87 status-word C2 condition bit (bit 10) — confirm.
static constexpr int kC2ConditionMask = 0x400;
60
Vladimir Marko3232dbb2018-07-25 15:42:46 +010061static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
62 // Custom calling convention: RAX serves as both input and output.
63 RegisterSet caller_saves = RegisterSet::Empty();
64 caller_saves.Add(Location::RegisterLocation(RAX));
65 return caller_saves;
66}
67
Roland Levillain7cbd27f2016-08-11 23:53:33 +010068// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
69#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070070#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010071
Andreas Gampe85b62f22015-09-09 13:15:38 -070072class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010073 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000074 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010075
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010076 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +000077 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010078 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000079 if (instruction_->CanThrowIntoCatchBlock()) {
80 // Live registers will be restored in the catch block if caught.
81 SaveLiveRegisters(codegen, instruction_->GetLocations());
82 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010083 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000084 instruction_,
85 instruction_->GetDexPc(),
86 this);
Roland Levillain888d0672015-11-23 18:53:50 +000087 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010088 }
89
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010090 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +010091
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010092 const char* GetDescription() const override { return "NullCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +010093
Nicolas Geoffraye5038322014-07-04 09:41:32 +010094 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010095 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
96};
97
Andreas Gampe85b62f22015-09-09 13:15:38 -070098class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000099 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000100 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000101
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100102 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000103 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +0000104 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100105 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000106 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +0000107 }
108
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100109 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +0100110
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100111 const char* GetDescription() const override { return "DivZeroCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100112
Calin Juravled0d48522014-11-04 16:40:20 +0000113 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000114 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
115};
116
Andreas Gampe85b62f22015-09-09 13:15:38 -0700117class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000118 public:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100119 DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, DataType::Type type, bool is_div)
David Srbecky9cd6d372016-02-09 15:24:47 +0000120 : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000121
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100122 void EmitNativeCode(CodeGenerator* codegen) override {
Calin Juravled0d48522014-11-04 16:40:20 +0000123 __ Bind(GetEntryLabel());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100124 if (type_ == DataType::Type::kInt32) {
Calin Juravlebacfec32014-11-14 15:54:36 +0000125 if (is_div_) {
126 __ negl(cpu_reg_);
127 } else {
Mark Mendellcfa410b2015-05-25 16:02:44 -0400128 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000129 }
130
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000131 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100132 DCHECK_EQ(DataType::Type::kInt64, type_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000133 if (is_div_) {
134 __ negq(cpu_reg_);
135 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -0400136 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000137 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000138 }
Calin Juravled0d48522014-11-04 16:40:20 +0000139 __ jmp(GetExitLabel());
140 }
141
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100142 const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100143
Calin Juravled0d48522014-11-04 16:40:20 +0000144 private:
Calin Juravlebacfec32014-11-14 15:54:36 +0000145 const CpuRegister cpu_reg_;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100146 const DataType::Type type_;
Calin Juravlebacfec32014-11-14 15:54:36 +0000147 const bool is_div_;
148 DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
Calin Juravled0d48522014-11-04 16:40:20 +0000149};
150
Andreas Gampe85b62f22015-09-09 13:15:38 -0700151class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000152 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100153 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000154 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000155
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100156 void EmitNativeCode(CodeGenerator* codegen) override {
Aart Bikb13c65b2017-03-21 20:14:07 -0700157 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000158 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000159 __ Bind(GetEntryLabel());
Aart Bik24b905f2017-04-06 09:59:06 -0700160 SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD.
Serban Constantinescuba45db02016-07-12 22:53:02 +0100161 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000162 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Aart Bik24b905f2017-04-06 09:59:06 -0700163 RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD.
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100164 if (successor_ == nullptr) {
165 __ jmp(GetReturnLabel());
166 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000167 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100168 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000169 }
170
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100171 Label* GetReturnLabel() {
172 DCHECK(successor_ == nullptr);
173 return &return_label_;
174 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000175
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100176 HBasicBlock* GetSuccessor() const {
177 return successor_;
178 }
179
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100180 const char* GetDescription() const override { return "SuspendCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100181
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000182 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100183 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000184 Label return_label_;
185
186 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
187};
188
// Slow path for HBoundsCheck: marshals (index, length) into the runtime
// calling convention and throws the array/string bounds exception. Fatal.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      // NOTE(review): with string compression the loaded value appears to hold
      // the char count shifted left by one; the shift recovers it — confirm
      // against mirror::String's length encoding.
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    // String.charAt uses a dedicated entrypoint so the exception message fits strings.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
248
// Slow path for HLoadClass/HClinitCheck: resolves the type and/or runs static
// initialization through the runtime, then moves the class to its output.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls, HInstruction* at)
      : SlowPathCode(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    // For HLoadClass the instruction IS the class load; for HClinitCheck it isn't.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Custom calling convention: RAX serves as both input and output.
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ movl(CpuRegister(RAX), Immediate(type_index.index_));
      x86_64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      // Put the already-resolved class into RAX for the clinit entrypoint.
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      x86_64_codegen->Move(Location::RegisterLocation(RAX), source);
    }
    if (must_do_clinit) {
      x86_64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
304
Vladimir Markoaad75c62016-10-03 08:46:48 +0000305class LoadStringSlowPathX86_64 : public SlowPathCode {
306 public:
307 explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}
308
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100309 void EmitNativeCode(CodeGenerator* codegen) override {
Vladimir Markoaad75c62016-10-03 08:46:48 +0000310 LocationSummary* locations = instruction_->GetLocations();
311 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
312
313 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
314 __ Bind(GetEntryLabel());
315 SaveLiveRegisters(codegen, locations);
316
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000317 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Vladimir Marko94ce9c22016-09-30 14:50:51 +0100318 // Custom calling convention: RAX serves as both input and output.
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000319 __ movl(CpuRegister(RAX), Immediate(string_index.index_));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000320 x86_64_codegen->InvokeRuntime(kQuickResolveString,
321 instruction_,
322 instruction_->GetDexPc(),
323 this);
324 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
325 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
326 RestoreLiveRegisters(codegen, locations);
327
Vladimir Markoaad75c62016-10-03 08:46:48 +0000328 __ jmp(GetExitLabel());
329 }
330
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100331 const char* GetDescription() const override { return "LoadStringSlowPathX86_64"; }
Vladimir Markoaad75c62016-10-03 08:46:48 +0000332
333 private:
334 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
335};
336
// Slow path for HInstanceOf/HCheckCast: calls the runtime for the non-trivial
// type check. May be fatal (failed check-cast throws without returning).
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
    }

    // A fatal path only needs the registers preserved when the exception can
    // be caught within this method.
    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    // A fatal check-cast never returns, so only the non-fatal case restores
    // state and jumps back to the fast path.
    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const override { return is_fatal_; }

 private:
  // Whether a failed check aborts execution of this method (no return path).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
399
Andreas Gampe85b62f22015-09-09 13:15:38 -0700400class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700401 public:
Aart Bik42249c32016-01-07 15:33:50 -0800402 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000403 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700404
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100405 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000406 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700407 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100408 LocationSummary* locations = instruction_->GetLocations();
409 SaveLiveRegisters(codegen, locations);
410 InvokeRuntimeCallingConvention calling_convention;
411 x86_64_codegen->Load32BitValue(
412 CpuRegister(calling_convention.GetRegisterAt(0)),
413 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100414 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100415 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700416 }
417
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100418 const char* GetDescription() const override { return "DeoptimizationSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100419
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700420 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700421 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
422};
423
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100424class ArraySetSlowPathX86_64 : public SlowPathCode {
425 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000426 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100427
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100428 void EmitNativeCode(CodeGenerator* codegen) override {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100429 LocationSummary* locations = instruction_->GetLocations();
430 __ Bind(GetEntryLabel());
431 SaveLiveRegisters(codegen, locations);
432
433 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100434 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100435 parallel_move.AddMove(
436 locations->InAt(0),
437 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100438 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100439 nullptr);
440 parallel_move.AddMove(
441 locations->InAt(1),
442 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100443 DataType::Type::kInt32,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100444 nullptr);
445 parallel_move.AddMove(
446 locations->InAt(2),
447 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100448 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100449 nullptr);
450 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
451
Roland Levillain0d5a2812015-11-13 10:07:31 +0000452 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100453 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000454 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100455 RestoreLiveRegisters(codegen, locations);
456 __ jmp(GetExitLabel());
457 }
458
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100459 const char* GetDescription() const override { return "ArraySetSlowPathX86_64"; }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100460
461 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100462 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
463};
464
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000476class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
477 public:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100478 ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
479 Location ref,
480 bool unpoison_ref_before_marking)
481 : SlowPathCode(instruction),
482 ref_(ref),
483 unpoison_ref_before_marking_(unpoison_ref_before_marking) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000484 DCHECK(kEmitCompilerReadBarrier);
485 }
486
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100487 const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86_64"; }
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000488
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100489 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000490 LocationSummary* locations = instruction_->GetLocations();
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100491 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
492 Register ref_reg = ref_cpu_reg.AsRegister();
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000493 DCHECK(locations->CanCall());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100494 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000495 DCHECK(instruction_->IsInstanceFieldGet() ||
496 instruction_->IsStaticFieldGet() ||
497 instruction_->IsArrayGet() ||
Roland Levillain16d9f942016-08-25 17:27:56 +0100498 instruction_->IsArraySet() ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000499 instruction_->IsLoadClass() ||
500 instruction_->IsLoadString() ||
501 instruction_->IsInstanceOf() ||
Roland Levillain3d312422016-06-23 13:53:42 +0100502 instruction_->IsCheckCast() ||
Roland Levillain0b671c02016-08-19 12:02:34 +0100503 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
504 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000505 << "Unexpected instruction in read barrier marking slow path: "
506 << instruction_->DebugName();
507
508 __ Bind(GetEntryLabel());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100509 if (unpoison_ref_before_marking_) {
Vladimir Marko953437b2016-08-24 08:30:46 +0000510 // Object* ref = ref_addr->AsMirrorPtr()
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100511 __ MaybeUnpoisonHeapReference(ref_cpu_reg);
Vladimir Marko953437b2016-08-24 08:30:46 +0000512 }
Roland Levillain4359e612016-07-20 11:32:19 +0100513 // No need to save live registers; it's taken care of by the
514 // entrypoint. Also, there is no need to update the stack mask,
515 // as this runtime call will not trigger a garbage collection.
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000516 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100517 DCHECK_NE(ref_reg, RSP);
518 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
Roland Levillain02b75802016-07-13 11:54:35 +0100519 // "Compact" slow path, saving two moves.
520 //
521 // Instead of using the standard runtime calling convention (input
522 // and output in R0):
523 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100524 // RDI <- ref
Roland Levillain02b75802016-07-13 11:54:35 +0100525 // RAX <- ReadBarrierMark(RDI)
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100526 // ref <- RAX
Roland Levillain02b75802016-07-13 11:54:35 +0100527 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100528 // we just use rX (the register containing `ref`) as input and output
Roland Levillain02b75802016-07-13 11:54:35 +0100529 // of a dedicated entrypoint:
530 //
531 // rX <- ReadBarrierMarkRegX(rX)
532 //
533 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100534 Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
Roland Levillaindec8f632016-07-22 17:10:06 +0100535 // This runtime call does not require a stack map.
536 x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000537 __ jmp(GetExitLabel());
538 }
539
540 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100541 // The location (register) of the marked object reference.
542 const Location ref_;
543 // Should the reference in `ref_` be unpoisoned prior to marking it?
544 const bool unpoison_ref_before_marking_;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000545
546 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
547};
548
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` is the (register) location of the reference to mark;
  // `field_addr` is the address of the field holding it, and its base
  // must be `obj`. `temp1`/`temp2` are scratch registers used by the
  // CAS sequence emitted below (`temp2` saves/restores RAX).
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  // Emits: optional unpoison, the per-register marking entrypoint call,
  // then (if marking produced a different reference) a relaxed strong
  // CAS that swings `*field_addr` from the old reference to the new one.
  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI, output in RAX):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but that is OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it is overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch registers: `temp1_` holds the pre-marking reference
  // (the CAS expected value); `temp2_` preserves RAX across the CAS.
  const CpuRegister temp1_;
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
720
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the result; `ref` is the reference to process;
  // `obj` is the holder object; the field is addressed either by the
  // constant `offset`, or by `index` (a register) when the offset is
  // not statically known (ArrayGet, Unsafe intrinsics).
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  // Emits a call to the ReadBarrierSlow runtime entrypoint with
  // (ref, obj, offset) marshalled into the runtime calling convention,
  // then moves the result (returned in RAX) into `out_`.
  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    // Whitelist of instructions that may legitimately reach this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset (data offset + scaled index)
        // and store it in `index_reg`, so the runtime call below can
        // receive it as its third (offset) argument.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (left shift by 2, i.e. TIMES_4) cannot
        // overflow in practice, as the runtime is unable to allocate
        // object arrays with a size larger than 2^26 - 1 (that is,
        // 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      // Dynamic offset: pass the (already scaled/adjusted) index register.
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // Static offset: materialize the constant after resolving the moves.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  // Returns a core caller-save register different from both `ref_`
  // and `obj_`, usable as scratch without being clobbered by
  // SaveLiveRegisters (which skips callee-saves).
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
902
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the processed root; `root` is the GC root location
  // passed to the runtime entrypoint.
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  // Emits a call to the ReadBarrierForRootSlow runtime entrypoint with
  // `root_` as the single argument and moves the result (RAX) to `out_`.
  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // Only class/string loads read GC roots through this slow path.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // Destination of the processed root.
  const Location out_;
  // The GC root handed to the runtime call.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
944
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100945#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100946// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
947#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100948
Roland Levillain4fa13f62015-07-06 18:11:54 +0100949inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700950 switch (cond) {
951 case kCondEQ: return kEqual;
952 case kCondNE: return kNotEqual;
953 case kCondLT: return kLess;
954 case kCondLE: return kLessEqual;
955 case kCondGT: return kGreater;
956 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700957 case kCondB: return kBelow;
958 case kCondBE: return kBelowEqual;
959 case kCondA: return kAbove;
960 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700961 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100962 LOG(FATAL) << "Unreachable";
963 UNREACHABLE();
964}
965
Aart Bike9f37602015-10-09 11:15:55 -0700966// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100967inline Condition X86_64FPCondition(IfCondition cond) {
968 switch (cond) {
969 case kCondEQ: return kEqual;
970 case kCondNE: return kNotEqual;
971 case kCondLT: return kBelow;
972 case kCondLE: return kBelowEqual;
973 case kCondGT: return kAbove;
974 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700975 default: break; // should not happen
Igor Murashkin2ffb7032017-11-08 13:35:21 -0800976 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100977 LOG(FATAL) << "Unreachable";
978 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700979}
980
Vladimir Markodc151b22015-10-15 18:02:30 +0100981HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
982 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffraybdb2ecc2018-09-18 14:33:55 +0100983 ArtMethod* method ATTRIBUTE_UNUSED) {
Nicolas Geoffrayc1a42cf2016-12-18 15:52:36 +0000984 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +0100985}
986
// Emits the code to load the callee (per the invoke's MethodLoadKind)
// into `temp` and then perform the call (per its CodePtrLocation),
// recording PC info for the call site. `slow_path` is forwarded to the
// runtime-call path and to RecordPcInfo.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip= */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already available in an
      // input location; no load is emitted.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
      // PC-relative LEA with a dummy offset; the real displacement is
      // filled in when the recorded patch is linked.
      __ leal(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageMethodPatch(invoke);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ movl(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageRelRoPatch(GetBootImageOffset(invoke));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry (patched at link time).
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordMethodBssEntryPatch(invoke);
      // No need for memory fence, thanks to the x86-64 memory model.
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      // JIT knows the method's address; embed it as a 64-bit immediate.
      Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Direct recursion: call this method's own frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
1047
// Emits a virtual dispatch: loads the receiver's class, looks up the
// target ArtMethod* in the embedded vtable at the invoke's vtable
// index (using `temp_in` as scratch), and calls its entry point.
void CodeGeneratorX86_64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  // This load doubles as the implicit null check of the receiver, so it
  // must immediately precede MaybeRecordImplicitNullCheck.
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  // May emit inline-cache instrumentation on the receiver's class
  // (see MaybeGenerateInlineCacheCheck).
  MaybeGenerateInlineCacheCheck(invoke, temp);

  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
1083
Vladimir Marko6fd16062018-06-26 11:02:04 +01001084void CodeGeneratorX86_64::RecordBootImageIntrinsicPatch(uint32_t intrinsic_data) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001085 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, intrinsic_data);
1086 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Marko6fd16062018-06-26 11:02:04 +01001087}
1088
Vladimir Markob066d432018-01-03 13:14:37 +00001089void CodeGeneratorX86_64::RecordBootImageRelRoPatch(uint32_t boot_image_offset) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001090 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, boot_image_offset);
1091 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Markob066d432018-01-03 13:14:37 +00001092}
1093
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001094void CodeGeneratorX86_64::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
1095 boot_image_method_patches_.emplace_back(
1096 invoke->GetTargetMethod().dex_file, invoke->GetTargetMethod().index);
Vladimir Marko65979462017-05-19 17:25:12 +01001097 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001098}
1099
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001100void CodeGeneratorX86_64::RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke) {
1101 method_bss_entry_patches_.emplace_back(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
1102 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001103}
1104
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001105void CodeGeneratorX86_64::RecordBootImageTypePatch(HLoadClass* load_class) {
1106 boot_image_type_patches_.emplace_back(
1107 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001108 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001109}
1110
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001111Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001112 type_bss_entry_patches_.emplace_back(
1113 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001114 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001115}
1116
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001117void CodeGeneratorX86_64::RecordBootImageStringPatch(HLoadString* load_string) {
1118 boot_image_string_patches_.emplace_back(
1119 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
1120 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01001121}
1122
Vladimir Markoaad75c62016-10-03 08:46:48 +00001123Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001124 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001125 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001126 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00001127}
1128
// Loads the address of the boot-image object identified by `boot_image_reference`
// into `reg`. Three modes:
//  - boot-image compilation: link-time PC-relative LEA (patched later);
//  - PIC AOT app compilation: load from a .data.bimg.rel.ro entry (patched later);
//  - JIT: the boot image is already mapped, so materialize the absolute address.
Vladimir Marko6fd16062018-06-26 11:02:04 +01001129void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference) {
1130 if (GetCompilerOptions().IsBootImage()) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001131 __ leal(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
Vladimir Marko6fd16062018-06-26 11:02:04 +01001132 RecordBootImageIntrinsicPatch(boot_image_reference);
Vladimir Markoa2da9b92018-10-10 14:21:55 +01001133 } else if (GetCompilerOptions().GetCompilePic()) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001134 __ movl(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
Vladimir Marko6fd16062018-06-26 11:02:04 +01001135 RecordBootImageRelRoPatch(boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01001136 } else {
// JIT-only path: the reference is an offset from the start of the first
// (primary) boot image space, which must have been loaded already.
Vladimir Marko8e524ad2018-07-13 10:27:43 +01001137 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markoeebb8212018-06-05 14:57:24 +01001138 gc::Heap* heap = Runtime::Current()->GetHeap();
1139 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01001140 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01001141 __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
1142 }
1143}
1144
Vladimir Marko6fd16062018-06-26 11:02:04 +01001145void CodeGeneratorX86_64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
1146 uint32_t boot_image_offset) {
1147 DCHECK(invoke->IsStatic());
1148 InvokeRuntimeCallingConvention calling_convention;
1149 CpuRegister argument = CpuRegister(calling_convention.GetRegisterAt(0));
1150 if (GetCompilerOptions().IsBootImage()) {
1151 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
1152 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
1153 __ leal(argument,
Andreas Gampe3db70682018-12-26 15:12:03 -08001154 Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
Vladimir Marko6fd16062018-06-26 11:02:04 +01001155 MethodReference target_method = invoke->GetTargetMethod();
1156 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
1157 boot_image_type_patches_.emplace_back(target_method.dex_file, type_idx.index_);
1158 __ Bind(&boot_image_type_patches_.back().label);
1159 } else {
1160 LoadBootImageAddress(argument, boot_image_offset);
1161 }
1162 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
1163 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
1164}
1165
// Adjustment (in bytes) subtracted from a bound label's position to get the
// literal offset: the 32-bit immediate sits in the last 4 bytes of the
// patched instruction.
Vladimir Markoaad75c62016-10-03 08:46:48 +00001166// The label points to the end of the "movl" or another instruction but the literal offset
1167// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
1168constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1169
// Converts each recorded PatchInfo into a linker patch via `Factory`.
// The literal offset is the label position minus the size of the embedded
// 32-bit constant (see kLabelPositionToLiteralOffsetAdjustment); the label
// position itself is passed as the anchor-instruction offset.
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001170template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00001171inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1172 const ArenaDeque<PatchInfo<Label>>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001173 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00001174 for (const PatchInfo<Label>& info : infos) {
1175 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1176 linker_patches->push_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001177 Factory(literal_offset, info.target_dex_file, info.label.Position(), info.offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00001178 }
1179}
1180
// Adapts a dex-file-less linker-patch factory (intrinsic reference,
// .data.bimg.rel.ro) to the 4-argument signature EmitPcRelativeLinkerPatches
// expects; such patches are recorded with a null target dex file.
Vladimir Marko6fd16062018-06-26 11:02:04 +01001181template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
1182linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
1183 const DexFile* target_dex_file,
1184 uint32_t pc_insn_offset,
1185 uint32_t boot_image_offset) {
1186 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
1187 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00001188}
1189
// Flushes every recorded patch into `linker_patches`. Boot-image-relative
// patches (method/type/string) are only emitted when compiling the boot image
// or its extension; otherwise those queues must be empty. The
// boot_image_other_patches_ queue carries intrinsic references for boot-image
// compiles and .data.bimg.rel.ro entries otherwise. The final DCHECK verifies
// the reserve() accounting matched what was emitted.
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001190void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00001191 DCHECK(linker_patches->empty());
1192 size_t size =
Vladimir Marko65979462017-05-19 17:25:12 +01001193 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001194 method_bss_entry_patches_.size() +
Vladimir Marko1998cd02017-01-13 13:02:58 +00001195 boot_image_type_patches_.size() +
Vladimir Marko65979462017-05-19 17:25:12 +01001196 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001197 boot_image_string_patches_.size() +
Vladimir Marko6fd16062018-06-26 11:02:04 +01001198 string_bss_entry_patches_.size() +
Vladimir Marko2d06e022019-07-08 15:45:19 +01001199 boot_image_other_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00001200 linker_patches->reserve(size);
Vladimir Marko44ca0752019-07-29 10:18:25 +01001201 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001202 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
1203 boot_image_method_patches_, linker_patches);
1204 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
1205 boot_image_type_patches_, linker_patches);
1206 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001207 boot_image_string_patches_, linker_patches);
Vladimir Marko764d4542017-05-16 10:31:41 +01001208 } else {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001209 DCHECK(boot_image_method_patches_.empty());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00001210 DCHECK(boot_image_type_patches_.empty());
1211 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko2d06e022019-07-08 15:45:19 +01001212 }
1213 if (GetCompilerOptions().IsBootImage()) {
1214 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
1215 boot_image_other_patches_, linker_patches);
1216 } else {
1217 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
1218 boot_image_other_patches_, linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001219 }
// .bss entry patches are emitted for every compilation mode.
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001220 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
1221 method_bss_entry_patches_, linker_patches);
1222 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
1223 type_bss_entry_patches_, linker_patches);
1224 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
1225 string_bss_entry_patches_, linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001226 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00001227}
1228
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001229void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001230 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001231}
1232
1233void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001234 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001235}
1236
Vladimir Markoa0431112018-06-25 09:32:54 +01001237const X86_64InstructionSetFeatures& CodeGeneratorX86_64::GetInstructionSetFeatures() const {
1238 return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86_64InstructionSetFeatures();
1239}
1240
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001241size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1242 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1243 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001244}
1245
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001246size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1247 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1248 return kX86_64WordSize;
1249}
1250
1251size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001252 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001253 __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -07001254 } else {
1255 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1256 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001257 return GetSlowPathFPWidth();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001258}
1259
1260size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001261 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001262 __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -07001263 } else {
1264 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1265 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001266 return GetSlowPathFPWidth();
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001267}
1268
// Calls the quick runtime entrypoint `entrypoint` through the thread-local
// entrypoint table, after validating the call site. A stack map is recorded
// at `dex_pc` only for entrypoints that can need one (GC / deopt / exception).
Calin Juravle175dc732015-08-25 15:42:32 +01001269void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1270 HInstruction* instruction,
1271 uint32_t dex_pc,
1272 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001273 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001274 GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
1275 if (EntrypointRequiresStackMap(entrypoint)) {
1276 RecordPcInfo(instruction, dex_pc, slow_path);
1277 }
Alexandre Rames8158f282015-08-07 10:26:17 +01001278}
1279
// Calls a runtime entrypoint at `entry_point_offset` without recording a
// stack map; the validation helper checks this is legal for the call site
// (i.e. the entrypoint cannot trigger GC, deoptimization or stack walking).
Roland Levillaindec8f632016-07-22 17:10:06 +01001280void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1281 HInstruction* instruction,
1282 SlowPathCode* slow_path) {
1283 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001284 GenerateInvokeRuntime(entry_point_offset);
1285}
1286
1287void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001288 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip= */ true));
Roland Levillaindec8f632016-07-22 17:10:06 +01001289}
1290
// x86-64 has no register pairs (64-bit values fit a single register).
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001291static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001292// Use a fake return address register to mimic Quick.
1293static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator. The base-class call passes the core
// and FP callee-save masks (plus the fake return-address register, so frame
// bookkeeping mimics Quick). All patch queues and JIT patch tables are
// arena-allocated. NOTE: member initializers must follow the declaration
// order in the header.
Mark Mendellfb8d2792015-03-31 22:16:59 -04001294CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001295 const CompilerOptions& compiler_options,
1296 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +00001297 : CodeGenerator(graph,
1298 kNumberOfCpuRegisters,
1299 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001300 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001301 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1302 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001303 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001304 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1305 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001306 compiler_options,
1307 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001308 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001309 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001310 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001311 move_resolver_(graph->GetAllocator(), this),
1312 assembler_(graph->GetAllocator()),
Vladimir Marko58155012015-08-19 12:49:41 +00001313 constant_area_start_(0),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001314 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1315 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1316 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1317 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001318 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001319 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko2d06e022019-07-08 15:45:19 +01001320 boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001321 jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1322 jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1323 fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001324 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
1325}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001326
// Constructs the instruction visitor that emits code for each HInstruction,
// sharing the assembler owned by `codegen`.
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001327InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1328 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001329 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001330 assembler_(codegen->GetAssembler()),
1331 codegen_(codegen) {}
1332
David Brazdil58282f42016-01-14 12:45:10 +00001333void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001334 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001335 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001336
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001337 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001338 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001339}
1340
// Maps a core register to its DWARF register number for CFI emission.
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001341static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001342 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001343}
David Srbecky9d8606d2015-04-12 09:35:32 +01001344
// Maps an XMM register to its DWARF register number for CFI emission.
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001345static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001346 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001347}
1348
// Emits the method prologue: optional hotness-counter bump, implicit stack
// overflow probe, callee-save core pushes, frame allocation, callee-save XMM
// spills, current-method spill, and should_deoptimize flag initialization.
// CFI state is updated in lockstep with every stack-mutating instruction.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001349void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001350 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001351 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001352 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001353 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001354 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001355
// Bump the method hotness counter, saturating at MaxCounter() rather than
// wrapping (the cmpw/j(kEqual) pair skips the increment at the cap).
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001356 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001357 NearLabel overflow;
1358 __ cmpw(Address(CpuRegister(kMethodRegisterArgument),
1359 ArtMethod::HotnessCountOffset().Int32Value()),
1360 Immediate(ArtMethod::MaxCounter()));
1361 __ j(kEqual, &overflow);
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001362 __ addw(Address(CpuRegister(kMethodRegisterArgument),
1363 ArtMethod::HotnessCountOffset().Int32Value()),
1364 Immediate(1));
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001365 __ Bind(&overflow);
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001366 }
1367
// Implicit stack overflow check: touch the page below the reserved region;
// a fault here is turned into a StackOverflowError via the recorded PC.
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001368 if (!skip_overflow_check) {
Vladimir Marko33bff252017-11-01 14:35:42 +00001369 size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
1370 __ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001371 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001372 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001373
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001374 if (HasEmptyFrame()) {
1375 return;
1376 }
1377
Nicolas Geoffray98893962015-01-21 12:32:32 +00001378 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001379 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001380 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001381 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001382 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1383 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001384 }
1385 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001386
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001387 int adjust = GetFrameSize() - GetCoreSpillSize();
1388 __ subq(CpuRegister(RSP), Immediate(adjust));
1389 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001390 uint32_t xmm_spill_location = GetFpuSpillStart();
Artem Serov6a0b6572019-07-26 20:38:37 +01001391 size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001392
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001393 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1394 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001395 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1396 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1397 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001398 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001399 }
1400
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001401 // Save the current method if we need it. Note that we do not
1402 // do this in HCurrentMethod, as the instruction might have been removed
1403 // in the SSA graph.
1404 if (RequiresCurrentMethod()) {
1405 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
1406 CpuRegister(kMethodRegisterArgument));
1407 }
Nicolas Geoffrayf7893532017-06-15 12:34:36 +01001408
1409 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1410 // Initialize should_deoptimize flag to 0.
1411 __ movl(Address(CpuRegister(RSP), GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
1412 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001413}
1414
// Emits the method epilogue, mirroring GenerateFrameEntry in reverse: XMM
// callee-save reloads, frame deallocation, core callee-save pops, then `ret`.
// CFI state is snapshotted and restored around the epilogue so code emitted
// after it (other return paths) keeps the in-frame CFI description.
1415void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001416 __ cfi().RememberState();
1417 if (!HasEmptyFrame()) {
1418 uint32_t xmm_spill_location = GetFpuSpillStart();
Artem Serov6a0b6572019-07-26 20:38:37 +01001419 size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();
David Srbeckyc34dc932015-04-12 09:27:43 +01001420 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1421 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1422 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1423 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1424 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1425 }
1426 }
1427
1428 int adjust = GetFrameSize() - GetCoreSpillSize();
1429 __ addq(CpuRegister(RSP), Immediate(adjust));
1430 __ cfi().AdjustCFAOffset(-adjust);
1431
1432 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1433 Register reg = kCoreCalleeSaves[i];
1434 if (allocated_registers_.ContainsCoreRegister(reg)) {
1435 __ popq(CpuRegister(reg));
1436 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1437 __ cfi().Restore(DWARFReg(reg));
1438 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001439 }
1440 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001441 __ ret();
1442 __ cfi().RestoreState();
1443 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001444}
1445
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001446void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1447 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001448}
1449
// Moves a value between two arbitrary locations: core register, FP register,
// 32-bit stack slot, 64-bit stack slot, or constant source. Memory-to-memory
// moves bounce through the reserved TMP register. Equal locations are a no-op.
// The instruction width (movl/movq, movss/movsd) is chosen by the destination
// location kind.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001450void CodeGeneratorX86_64::Move(Location destination, Location source) {
1451 if (source.Equals(destination)) {
1452 return;
1453 }
1454 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001455 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001456 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001457 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001458 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001459 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001460 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001461 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1462 } else if (source.IsConstant()) {
1463 HConstant* constant = source.GetConstant();
1464 if (constant->IsLongConstant()) {
1465 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1466 } else {
1467 Load32BitValue(dest, GetInt32ValueOf(constant));
1468 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001469 } else {
1470 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001471 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001472 }
1473 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001474 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001475 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001476 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001477 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001478 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1479 } else if (source.IsConstant()) {
1480 HConstant* constant = source.GetConstant();
1481 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1482 if (constant->IsFloatConstant()) {
1483 Load32BitValue(dest, static_cast<int32_t>(value));
1484 } else {
1485 Load64BitValue(dest, value);
1486 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001487 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001488 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001489 } else {
1490 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001491 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001492 }
1493 } else if (destination.IsStackSlot()) {
1494 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001495 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001496 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001497 } else if (source.IsFpuRegister()) {
1498 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001499 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001500 } else if (source.IsConstant()) {
1501 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001502 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001503 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001504 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001505 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001506 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1507 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001508 }
1509 } else {
1510 DCHECK(destination.IsDoubleStackSlot());
1511 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001512 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001513 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001514 } else if (source.IsFpuRegister()) {
1515 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001516 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001517 } else if (source.IsConstant()) {
1518 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001519 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1520 int64_t value = GetInt64ValueOf(constant);
Mark Mendellcfa410b2015-05-25 16:02:44 -04001521 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001522 } else {
1523 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001524 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1525 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001526 }
1527 }
1528}
1529
Calin Juravle175dc732015-08-25 15:42:32 +01001530void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1531 DCHECK(location.IsRegister());
1532 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1533}
1534
// CodeGenerator interface adapter: on x86-64 the data type is not needed to
// pick the move (Move() decides from the location kinds), so it is ignored.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001535void CodeGeneratorX86_64::MoveLocation(
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001536 Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001537 Move(dst, src);
1538}
1539
1540void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1541 if (location.IsRegister()) {
1542 locations->AddTemp(location);
1543 } else {
1544 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1545 }
1546}
1547
// Emits the control flow for a goto-like edge. An edge into the exit block
// after an always-throwing instruction needs no code. A loop back edge with a
// suspend check bumps the method hotness counter (when enabled, saturating at
// MaxCounter) and emits the suspend check instead of a plain jump. Otherwise,
// a pending entry-block suspend check is emitted, then a jump unless the
// successor is the fall-through block.
David Brazdilfc6a86a2015-06-26 10:33:45 +00001548void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08001549 if (successor->IsExitBlock()) {
1550 DCHECK(got->GetPrevious()->AlwaysThrows());
1551 return; // no code needed
1552 }
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001553
1554 HBasicBlock* block = got->GetBlock();
1555 HInstruction* previous = got->GetPrevious();
1556
1557 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001558 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001559 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
// Reload the ArtMethod* spilled at the bottom of the frame (offset 0 —
// NOTE(review): presumably kCurrentMethodStackOffset; confirm against
// GenerateFrameEntry) and saturate-increment its hotness counter.
1560 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), 0));
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001561 NearLabel overflow;
1562 __ cmpw(Address(CpuRegister(TMP), ArtMethod::HotnessCountOffset().Int32Value()),
1563 Immediate(ArtMethod::MaxCounter()));
1564 __ j(kEqual, &overflow);
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001565 __ addw(Address(CpuRegister(TMP), ArtMethod::HotnessCountOffset().Int32Value()),
1566 Immediate(1));
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001567 __ Bind(&overflow);
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001568 }
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001569 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1570 return;
1571 }
1572
1573 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1574 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1575 }
1576 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001577 __ jmp(codegen_->GetLabelOf(successor));
1578 }
1579}
1580
// A goto has no operands and produces no value: it needs no LocationSummary.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1584
// Code generation for a goto is shared with HTryBoundary via HandleGoto.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1588
// A try boundary has no operands and produces no value: no LocationSummary.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1592
1593void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1594 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1595 if (!successor->IsExitBlock()) {
1596 HandleGoto(try_boundary, successor);
1597 }
1598}
1599
// The exit instruction has no operands and produces no value.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1603
// Nothing to emit for the exit block itself; predecessors emit returns/throws.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1606
// Emits the conditional jumps for a floating-point comparison whose ucomiss/
// ucomisd has already set EFLAGS. NaN operands leave the flags "unordered",
// so the kUnordered jump must be emitted first, targeting whichever label the
// condition's NaN semantics select; then the ordinary FP condition jump goes
// to `true_label`. The caller is responsible for the fall-through/false path.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1618
// Emits only the comparison for `condition`, leaving the result in EFLAGS and
// emitting no branches. Integer-like types (including references) use an int
// compare, kInt64 a long compare, and FP types a ucomiss/ucomisd against a
// register, a constant-pool literal, or a stack slot.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against a float literal placed in the constant area.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against a double literal placed in the constant area.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1672
// Emits the comparison for `condition` followed by the branch(es) to the given
// targets. Either target may be null, meaning "fall through"; a shared local
// label is bound in that case so GenerateFPJumps always has real labels.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
                                                                  LabelType* true_target_in,
                                                                  LabelType* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  LabelType fallthrough_target;
  LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
  LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;

  // Generate the comparison to set the CC.
  GenerateCompareTest(condition);

  // Now generate the correct jump(s).
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kInt64: {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
      break;
    }
    case DataType::Type::kFloat32: {
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    case DataType::Type::kFloat64: {
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }

  // Emit the jump to the false target unless it is the fall-through.
  if (false_target != &fallthrough_target) {
    __ jmp(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
1713
David Brazdil0debae72015-11-12 18:37:00 +00001714static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1715 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1716 // are set only strictly before `branch`. We can't use the eflags on long
1717 // conditions if they are materialized due to the complex branching.
1718 return cond->IsCondition() &&
1719 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001720 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001721}
1722
// Emits a test of `instruction`'s condition input and the branches to the
// given targets. A null target means the corresponding successor falls
// through. Constant conditions become straight jumps; materialized conditions
// either reuse EFLAGS (when set by the immediately preceding instruction) or
// are compared against 0; non-materialized long/FP conditions are emitted
// inline via GenerateCompareTestAndBranch.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // EFLAGS are still valid from the condition's own comparison.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1806
// An if only needs an input location when its condition is a boolean value or
// a materialized condition; folded long/FP conditions are consumed in place.
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1813
1814void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001815 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1816 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1817 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1818 nullptr : codegen_->GetLabelOf(true_successor);
1819 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1820 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08001821 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001822}
1823
// A deoptimization calls a runtime entrypoint on a slow path; only the first
// runtime calling-convention register needs to be saved across it. The
// condition input is only needed when it is a boolean value or a materialized
// condition.
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1835
// Branches to the deoptimization slow path when the condition holds; the
// false edge falls through to the regular code.
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index= */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target= */ nullptr);
}
1843
// The deoptimize flag is loaded from the stack into any available register.
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
1849
1850void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1851 __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
1852 Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
1853}
1854
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001855static bool SelectCanUseCMOV(HSelect* select) {
1856 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001857 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001858 return false;
1859 }
1860
1861 // A FP condition doesn't generate the single CC that we need.
1862 HInstruction* condition = select->GetCondition();
1863 if (condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001864 DataType::IsFloatingPointType(condition->InputAt(0)->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001865 return false;
1866 }
1867
1868 // We can generate a CMOV for this Select.
1869 return true;
1870}
1871
// Output always aliases input 0 (the "false" value). FP selects keep both
// values in/around FPU registers; integer selects that can use CMOV force a
// constant "true" value into a register (CMOV has no immediate form), while
// everything else can live anywhere. The condition (input 2) needs a register
// only when it is a boolean value or a materialized condition.
void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
  if (DataType::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::Any());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    if (SelectCanUseCMOV(select)) {
      if (select->InputAt(1)->IsConstant()) {
        locations->SetInAt(1, Location::RequiresRegister());
      } else {
        locations->SetInAt(1, Location::Any());
      }
    } else {
      locations->SetInAt(1, Location::Any());
    }
  }
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  locations->SetOut(Location::SameAsFirstInput());
}
1894
// Lowers a select either to a CMOV (integer condition and result) or to a
// test-and-branch around a move. The output starts out holding the "false"
// value (it aliases input 0), so only the "true" case needs a write.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: redo the comparison to set EFLAGS.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = DataType::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the "true" value.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index= */ 2,
                                     /* true_target= */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1951
// Native debug info needs an (empty) LocationSummary so the instruction has
// an environment to record.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
1955
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1959
// Emits a single one-byte nop (used as a padding/debug-info anchor).
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1963
// Shared location building for all HCondition flavors. The first operand of a
// long comparison needs a CPU register and of an FP comparison an XMM
// register; the second operand may live anywhere. An output register is only
// needed when the condition is materialized (not emitted at its use site).
void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
  // Handle the long/FP comparisons made in instruction simplification.
  switch (cond->InputAt(0)->GetType()) {
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      break;
    default:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      break;
  }
  if (!cond->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister());
  }
}
1987
// Materializes `cond` as 0/1 in its output register. Integer and long
// comparisons use setcc directly; FP comparisons go through ucomiss/ucomisd
// plus GenerateFPJumps (to get the NaN semantics right) and then convert the
// jump targets into 0/1 stores. Nothing is emitted when the condition is
// consumed at its use site.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kInt64:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kFloat32: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
2057
// All comparison HIR nodes — both their LocationsBuilder and their
// code-generation visitors — delegate to HandleCondition, which holds the
// shared register-allocation and emission logic.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

// Unsigned comparisons (Below/Above families) share the same path.
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2137
// HCompare takes its first operand in a register (CPU for integral types, XMM
// for FP), its second anywhere, and produces an int result in a CPU register.
// For integral inputs the output may not overlap the inputs.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2165
// Materializes the HCompare result (-1, 0 or 1) into the output register.
// Emits the type-appropriate compare, then a common branch ladder that
// writes 0 on equality, 1 on "greater" and -1 on "less".
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  DataType::Type type = compare->InputAt(0)->GetType();
  // Condition used by the shared tail to branch to `less`; FP compares
  // override it because ucomis{s,d} reports "below" through CF.
  Condition less_cond = kLess;

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN) result: the gt/lt bias of the HCompare decides
      // whether NaN compares as greater or as less.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Common tail: flags from the compare above are still live here.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2235
// Constants produce no code: their location *is* the constant, and users
// fold it into their own instructions ("generated at use site").
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2286
// A constructor fence needs no registers; it only emits a barrier.
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence publishes final fields: a store/store barrier suffices.
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Explicit barriers carry their own kind (e.g. from volatile accesses).
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2303
// A void return has no input; it only tears down the frame.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2311
// Pins the returned value to its ABI return register: RAX for integral and
// reference types, XMM0 for floating-point types.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2336
// The register allocator already placed the value in the return register
// (see the matching LocationsBuilder visitor), so no move is needed here:
// debug builds merely verify the placement, then the frame is torn down.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case DataType::Type::kReference:
      case DataType::Type::kBool:
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2363
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002364Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type) const {
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002365 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002366 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002367 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002368 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002369 case DataType::Type::kInt8:
2370 case DataType::Type::kUint16:
2371 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -08002372 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002373 case DataType::Type::kInt32:
Aart Bik66c158e2018-01-31 12:55:04 -08002374 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002375 case DataType::Type::kInt64:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002376 return Location::RegisterLocation(RAX);
2377
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002378 case DataType::Type::kVoid:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002379 return Location::NoLocation();
2380
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002381 case DataType::Type::kFloat64:
2382 case DataType::Type::kFloat32:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002383 return Location::FpuRegisterLocation(XMM0);
2384 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002385
2386 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002387}
2388
// The callee ArtMethod* is passed in the fixed method register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2392
// Assigns the location of the next managed-ABI argument of the given type.
// Stateful: gp_index_ / float_index_ count consumed GPR / FPU argument
// registers, and stack_index_ tracks vreg-sized stack slots so that
// arguments overflowing the registers land at the right stack offsets.
// Note: stack_index_ is bumped unconditionally (by 1 or 2 slots) because
// the managed convention reserves stack space even for register arguments.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kInt64: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        // A 64-bit value still consumes a single GPR on x86-64.
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Skip two GPR indices so later args keep consistent numbering.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kFloat32: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kFloat64: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      UNREACHABLE();
  }
  return Location::NoLocation();
}
2451
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Dispatch through the shared runtime trampoline; resolution happens at runtime.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2462
// Location setup for static/direct calls. Intrinsics get their own,
// specialized locations; everything else uses the common invoke summary.
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2475
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002476static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2477 if (invoke->GetLocations()->Intrinsified()) {
2478 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2479 intrinsic.Dispatch(invoke);
2480 return true;
2481 }
2482 return false;
2483}
2484
// Code generation for static/direct calls: either the intrinsic expansion
// or a regular static-or-direct call sequence.
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  // The temp (when present) is used by the call sequence, e.g. to hold the method.
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
}
2498
// Common location setup for all invoke kinds: places arguments according to
// the x86-64 dex calling convention.
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2503
// Virtual calls: intrinsics bypass the vtable dispatch entirely.
void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  // Temp 0 holds the receiver's class / target method during dispatch.
  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
}
2521
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument (the interface method index, passed in RAX).
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2527
// In baseline JIT compilation, emits the inline-cache update for a virtual or
// interface call: compares the receiver's class (in `klass`, which must be
// RDI — the register the runtime entrypoint expects) against the cache's
// first entry, and calls the runtime to update the cache on a miss.
void CodeGeneratorX86_64::MaybeGenerateInlineCacheCheck(HInstruction* instruction,
                                                        CpuRegister klass) {
  DCHECK_EQ(RDI, klass.AsRegister());
  // We know the destination of an intrinsic, so no need to record inline
  // caches.
  if (!instruction->GetLocations()->Intrinsified() &&
      GetCompilerOptions().IsBaseline() &&
      !Runtime::Current()->IsAotCompiler()) {
    ScopedObjectAccess soa(Thread::Current());
    ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
    InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
    uint64_t address = reinterpret_cast64<uint64_t>(cache);
    NearLabel done;
    // Load the cache's address into TMP for the comparison below.
    __ movq(CpuRegister(TMP), Immediate(address));
    // Fast path for a monomorphic cache.
    __ cmpl(Address(CpuRegister(TMP), InlineCache::ClassesOffset().Int32Value()), klass);
    __ j(kEqual, &done);
    // Slow path: let the runtime record the new receiver class.
    GenerateInvokeRuntime(
        GetThreadOffset<kX86_64PointerSize>(kQuickUpdateInlineCache).Int32Value());
    __ Bind(&done);
  }
}
2550
// Interface call dispatch: load the receiver's class, walk its IMT
// (interface method table) and call through the IMT slot for this method.
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  // The class load above is the implicit null check of the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  codegen_->MaybeGenerateInlineCacheCheck(invoke, temp);

  // Set the hidden argument. This is safe to do here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  // We also do it after MaybeGenerateInlineCacheCheck, which may use RAX.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
          Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Byte offset of this interface method's IMT slot.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2600
// MethodHandle.invoke/invokeExact and call-site invocations both go through
// shared runtime call helpers; only the common invoke locations are needed.
void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}

void LocationsBuilderX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  codegen_->GenerateInvokeCustomCall(invoke);
}
2616
// Negation is done in place (output same as first input). FP negation needs
// an extra FPU register to hold the sign-bit mask.
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2638
// Emits negation: neg{l,q} for integers, and an XOR with the sign-bit mask
// for floats/doubles (x86 has no FP negate instruction).
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kInt64:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kFloat32: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2682
// Sets up register-allocation constraints for a primitive type conversion.
// The outer switch dispatches on the result type, the inner switches on the
// input type; each pair picks input/output location requirements that match
// the instruction forms emitted by the code generator's VisitTypeConversion.
void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();
  // Implicit (no-op) conversions should have been removed before codegen.
  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  switch (result_type) {
    // Narrowing to a sub-word integral type: movzx/movsx can read a register,
    // a stack slot or fold a constant, so the input may live anywhere.
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      DCHECK(DataType::IsIntegralType(input_type)) << input_type;
      locations->SetInAt(0, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kInt32:
      switch (input_type) {
        case DataType::Type::kInt64:
          // long -> int is a plain 32-bit move; source may be anywhere.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case DataType::Type::kFloat32:
          // float -> int uses comiss/cvttss2si on an XMM source.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat64:
          // double -> int uses comisd/cvttsd2si on an XMM source.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kInt64:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          // TODO: We would benefit from a (to-be-implemented)
          // Location::RegisterOrStackSlot requirement for this input.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat32:
          // float -> long uses comiss/cvttss2si on an XMM source.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat64:
          // double -> long uses comisd/cvttsd2si on an XMM source.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat32:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          // cvtsi2ss accepts a register, memory operand or materialized constant.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kFloat64:
          locations->SetInAt(0, Location::Any());
          // Output may not alias the input: kNoOutputOverlap.
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat64:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          // cvtsi2sd accepts a register, memory operand or materialized constant.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kFloat32:
          locations->SetInAt(0, Location::Any());
          // Output may not alias the input: kNoOutputOverlap.
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
2815
2816void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2817 LocationSummary* locations = conversion->GetLocations();
2818 Location out = locations->Out();
2819 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002820 DataType::Type result_type = conversion->GetResultType();
2821 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002822 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2823 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002824 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002825 case DataType::Type::kUint8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002826 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002827 case DataType::Type::kInt8:
2828 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002829 case DataType::Type::kInt16:
2830 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002831 case DataType::Type::kInt64:
2832 if (in.IsRegister()) {
2833 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2834 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2835 __ movzxb(out.AsRegister<CpuRegister>(),
2836 Address(CpuRegister(RSP), in.GetStackIndex()));
2837 } else {
2838 __ movl(out.AsRegister<CpuRegister>(),
2839 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
2840 }
2841 break;
2842
2843 default:
2844 LOG(FATAL) << "Unexpected type conversion from " << input_type
2845 << " to " << result_type;
2846 }
2847 break;
2848
2849 case DataType::Type::kInt8:
2850 switch (input_type) {
2851 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002852 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002853 case DataType::Type::kInt16:
2854 case DataType::Type::kInt32:
2855 case DataType::Type::kInt64:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002856 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002857 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002858 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002859 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002860 Address(CpuRegister(RSP), in.GetStackIndex()));
2861 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002862 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002863 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002864 }
2865 break;
2866
2867 default:
2868 LOG(FATAL) << "Unexpected type conversion from " << input_type
2869 << " to " << result_type;
2870 }
2871 break;
2872
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002873 case DataType::Type::kUint16:
2874 switch (input_type) {
2875 case DataType::Type::kInt8:
2876 case DataType::Type::kInt16:
2877 case DataType::Type::kInt32:
2878 case DataType::Type::kInt64:
2879 if (in.IsRegister()) {
2880 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2881 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2882 __ movzxw(out.AsRegister<CpuRegister>(),
2883 Address(CpuRegister(RSP), in.GetStackIndex()));
2884 } else {
2885 __ movl(out.AsRegister<CpuRegister>(),
2886 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
2887 }
2888 break;
2889
2890 default:
2891 LOG(FATAL) << "Unexpected type conversion from " << input_type
2892 << " to " << result_type;
2893 }
2894 break;
2895
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002896 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002897 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002898 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002899 case DataType::Type::kInt32:
2900 case DataType::Type::kInt64:
Roland Levillain01a8d712014-11-14 16:27:39 +00002901 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002902 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002903 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002904 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002905 Address(CpuRegister(RSP), in.GetStackIndex()));
2906 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002907 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002908 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002909 }
2910 break;
2911
2912 default:
2913 LOG(FATAL) << "Unexpected type conversion from " << input_type
2914 << " to " << result_type;
2915 }
2916 break;
2917
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002918 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002919 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002920 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002921 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002922 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002923 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002924 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002925 Address(CpuRegister(RSP), in.GetStackIndex()));
2926 } else {
2927 DCHECK(in.IsConstant());
2928 DCHECK(in.GetConstant()->IsLongConstant());
2929 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002930 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002931 }
2932 break;
2933
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002934 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002935 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2936 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002937 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002938
2939 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002940 // if input >= (float)INT_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07002941 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimIntMax)));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002942 __ j(kAboveEqual, &done);
2943 // if input == NaN goto nan
2944 __ j(kUnordered, &nan);
2945 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002946 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002947 __ jmp(&done);
2948 __ Bind(&nan);
2949 // output = 0
2950 __ xorl(output, output);
2951 __ Bind(&done);
2952 break;
2953 }
2954
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002955 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002956 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2957 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002958 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002959
2960 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002961 // if input >= (double)INT_MAX goto done
2962 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002963 __ j(kAboveEqual, &done);
2964 // if input == NaN goto nan
2965 __ j(kUnordered, &nan);
2966 // output = double-to-int-truncate(input)
2967 __ cvttsd2si(output, input);
2968 __ jmp(&done);
2969 __ Bind(&nan);
2970 // output = 0
2971 __ xorl(output, output);
2972 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002973 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002974 }
Roland Levillain946e1432014-11-11 17:35:19 +00002975
2976 default:
2977 LOG(FATAL) << "Unexpected type conversion from " << input_type
2978 << " to " << result_type;
2979 }
2980 break;
2981
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002982 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002983 switch (input_type) {
2984 DCHECK(out.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002985 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002986 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002987 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002988 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002989 case DataType::Type::kInt16:
2990 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002991 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002992 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002993 break;
2994
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002995 case DataType::Type::kFloat32: {
Roland Levillain624279f2014-12-04 11:54:28 +00002996 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2997 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002998 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002999
Mark Mendell92e83bf2015-05-07 11:25:03 -04003000 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003001 // if input >= (float)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003002 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimLongMax)));
Roland Levillain624279f2014-12-04 11:54:28 +00003003 __ j(kAboveEqual, &done);
3004 // if input == NaN goto nan
3005 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003006 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00003007 __ cvttss2si(output, input, true);
3008 __ jmp(&done);
3009 __ Bind(&nan);
3010 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003011 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00003012 __ Bind(&done);
3013 break;
3014 }
3015
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003016 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003017 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3018 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003019 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003020
Mark Mendell92e83bf2015-05-07 11:25:03 -04003021 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003022 // if input >= (double)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003023 __ comisd(input, codegen_->LiteralDoubleAddress(
3024 static_cast<double>(kPrimLongMax)));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003025 __ j(kAboveEqual, &done);
3026 // if input == NaN goto nan
3027 __ j(kUnordered, &nan);
3028 // output = double-to-long-truncate(input)
3029 __ cvttsd2si(output, input, true);
3030 __ jmp(&done);
3031 __ Bind(&nan);
3032 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003033 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003034 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00003035 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003036 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003037
3038 default:
3039 LOG(FATAL) << "Unexpected type conversion from " << input_type
3040 << " to " << result_type;
3041 }
3042 break;
3043
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003044 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00003045 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003046 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003047 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003048 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003049 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003050 case DataType::Type::kInt16:
3051 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003052 if (in.IsRegister()) {
3053 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3054 } else if (in.IsConstant()) {
3055 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3056 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003057 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003058 } else {
3059 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3060 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3061 }
Roland Levillaincff13742014-11-17 14:32:17 +00003062 break;
3063
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003064 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003065 if (in.IsRegister()) {
3066 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3067 } else if (in.IsConstant()) {
3068 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3069 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06003070 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003071 } else {
3072 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3073 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3074 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003075 break;
3076
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003077 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04003078 if (in.IsFpuRegister()) {
3079 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3080 } else if (in.IsConstant()) {
3081 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
3082 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003083 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003084 } else {
3085 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
3086 Address(CpuRegister(RSP), in.GetStackIndex()));
3087 }
Roland Levillaincff13742014-11-17 14:32:17 +00003088 break;
3089
3090 default:
3091 LOG(FATAL) << "Unexpected type conversion from " << input_type
3092 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003093 }
Roland Levillaincff13742014-11-17 14:32:17 +00003094 break;
3095
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003096 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00003097 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003098 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003099 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003100 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003101 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003102 case DataType::Type::kInt16:
3103 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003104 if (in.IsRegister()) {
3105 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3106 } else if (in.IsConstant()) {
3107 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3108 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003109 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003110 } else {
3111 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3112 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3113 }
Roland Levillaincff13742014-11-17 14:32:17 +00003114 break;
3115
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003116 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003117 if (in.IsRegister()) {
3118 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3119 } else if (in.IsConstant()) {
3120 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3121 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003122 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003123 } else {
3124 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3125 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3126 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003127 break;
3128
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003129 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04003130 if (in.IsFpuRegister()) {
3131 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3132 } else if (in.IsConstant()) {
3133 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3134 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003135 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003136 } else {
3137 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3138 Address(CpuRegister(RSP), in.GetStackIndex()));
3139 }
Roland Levillaincff13742014-11-17 14:32:17 +00003140 break;
3141
3142 default:
3143 LOG(FATAL) << "Unexpected type conversion from " << input_type
3144 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003145 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003146 break;
3147
3148 default:
3149 LOG(FATAL) << "Unexpected type conversion from " << input_type
3150 << " to " << result_type;
3151 }
3152}
3153
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003154void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003155 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003156 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003157 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003158 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003159 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003160 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3161 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003162 break;
3163 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003164
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003165 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003166 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003167 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003168 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003169 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003170 break;
3171 }
3172
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003173 case DataType::Type::kFloat64:
3174 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003175 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003176 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003177 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003178 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003179 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003180
3181 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003182 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003183 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003184}
3185
// Emits x86-64 code for an HAdd.
// Integer adds pick between two-operand addl/addq (when the output aliases
// one of the inputs) and a three-operand leal/leaq (when it aliases neither),
// avoiding an extra move. FP adds are two-operand, with the result in the
// first input register (SameAsFirstInput constraint from the builder).
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out == first: plain two-operand add.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // out == second: addition commutes, add first into out.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // out is distinct from both inputs: lea computes first + second
          // without clobbering either.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // lea with displacement: out = first + constant, first untouched.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Memory operand: add directly from the stack slot into first (== out).
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // Addition commutes, add first into out.
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // out distinct from both inputs: three-operand form via leaq.
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        // The builder guaranteed the constant fits in a 32-bit immediate
        // (RegisterOrInt32Constant).
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // Result lands in first (SameAsFirstInput).
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is read from a RIP-relative literal pool entry.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      // Result lands in first (SameAsFirstInput).
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is read from a RIP-relative literal pool entry.
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3277
3278void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003279 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003280 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003281 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003282 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003283 locations->SetInAt(0, Location::RequiresRegister());
3284 locations->SetInAt(1, Location::Any());
3285 locations->SetOut(Location::SameAsFirstInput());
3286 break;
3287 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003288 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003289 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003290 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003291 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003292 break;
3293 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003294 case DataType::Type::kFloat32:
3295 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003296 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003297 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003298 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003299 break;
Calin Juravle11351682014-10-23 15:38:15 +01003300 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003301 default:
Calin Juravle11351682014-10-23 15:38:15 +01003302 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003303 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003304}
3305
3306void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3307 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003308 Location first = locations->InAt(0);
3309 Location second = locations->InAt(1);
3310 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003311 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003312 case DataType::Type::kInt32: {
Calin Juravle11351682014-10-23 15:38:15 +01003313 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003314 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003315 } else if (second.IsConstant()) {
3316 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003317 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003318 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003319 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003320 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003321 break;
3322 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003323 case DataType::Type::kInt64: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003324 if (second.IsConstant()) {
3325 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3326 DCHECK(IsInt<32>(value));
3327 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3328 } else {
3329 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3330 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003331 break;
3332 }
3333
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003334 case DataType::Type::kFloat32: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003335 if (second.IsFpuRegister()) {
3336 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3337 } else if (second.IsConstant()) {
3338 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003339 codegen_->LiteralFloatAddress(
3340 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003341 } else {
3342 DCHECK(second.IsStackSlot());
3343 __ subss(first.AsFpuRegister<XmmRegister>(),
3344 Address(CpuRegister(RSP), second.GetStackIndex()));
3345 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003346 break;
Calin Juravle11351682014-10-23 15:38:15 +01003347 }
3348
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003349 case DataType::Type::kFloat64: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003350 if (second.IsFpuRegister()) {
3351 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3352 } else if (second.IsConstant()) {
3353 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003354 codegen_->LiteralDoubleAddress(
3355 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003356 } else {
3357 DCHECK(second.IsDoubleStackSlot());
3358 __ subsd(first.AsFpuRegister<XmmRegister>(),
3359 Address(CpuRegister(RSP), second.GetStackIndex()));
3360 }
Calin Juravle11351682014-10-23 15:38:15 +01003361 break;
3362 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003363
3364 default:
Calin Juravle11351682014-10-23 15:38:15 +01003365 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003366 }
3367}
3368
Calin Juravle34bacdf2014-10-07 20:23:36 +01003369void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3370 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003371 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003372 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003373 case DataType::Type::kInt32: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003374 locations->SetInAt(0, Location::RequiresRegister());
3375 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003376 if (mul->InputAt(1)->IsIntConstant()) {
3377 // Can use 3 operand multiply.
3378 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3379 } else {
3380 locations->SetOut(Location::SameAsFirstInput());
3381 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003382 break;
3383 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003384 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003385 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003386 locations->SetInAt(1, Location::Any());
3387 if (mul->InputAt(1)->IsLongConstant() &&
3388 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003389 // Can use 3 operand multiply.
3390 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3391 } else {
3392 locations->SetOut(Location::SameAsFirstInput());
3393 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003394 break;
3395 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003396 case DataType::Type::kFloat32:
3397 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003398 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003399 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003400 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003401 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003402 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003403
3404 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003405 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003406 }
3407}
3408
// Emits the multiply for HMul. Integer multiplies by a constant use the
// three-operand imul form (out may differ from first); every other path
// requires the destination to alias the first input, as arranged by the
// locations builder.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        // Three-operand form: out = first * imm.
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        // Two-operand form: first *= second.
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        // Two-operand form with a stack-slot operand.
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case DataType::Type::kInt64: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Three-operand form with a 32-bit immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is read from the constant area.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3492
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003493void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3494 uint32_t stack_adjustment, bool is_float) {
3495 if (source.IsStackSlot()) {
3496 DCHECK(is_float);
3497 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3498 } else if (source.IsDoubleStackSlot()) {
3499 DCHECK(!is_float);
3500 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3501 } else {
3502 // Write the value to the temporary location on the stack and load to FP stack.
3503 if (is_float) {
3504 Location stack_temp = Location::StackSlot(temp_offset);
3505 codegen_->Move(stack_temp, source);
3506 __ flds(Address(CpuRegister(RSP), temp_offset));
3507 } else {
3508 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3509 codegen_->Move(stack_temp, source);
3510 __ fldl(Address(CpuRegister(RSP), temp_offset));
3511 }
3512 }
3513}
3514
// Computes the floating-point remainder for HRem using the x87 fprem
// instruction: operands are staged through a temporary stack area onto the
// x87 stack, fprem is iterated until the reduction is complete, and the
// result is copied back into the output XMM register.
// Note: clobbers RAX (fstsw stores the FPU status word into AX).
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem computes ST(0) mod ST(1), so the divisor is pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  // fprem performs partial remainder reduction and may need several rounds.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3567
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003568void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3569 DCHECK(instruction->IsDiv() || instruction->IsRem());
3570
3571 LocationSummary* locations = instruction->GetLocations();
3572 Location second = locations->InAt(1);
3573 DCHECK(second.IsConstant());
3574
3575 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3576 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003577 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003578
3579 DCHECK(imm == 1 || imm == -1);
3580
3581 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003582 case DataType::Type::kInt32: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003583 if (instruction->IsRem()) {
3584 __ xorl(output_register, output_register);
3585 } else {
3586 __ movl(output_register, input_register);
3587 if (imm == -1) {
3588 __ negl(output_register);
3589 }
3590 }
3591 break;
3592 }
3593
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003594 case DataType::Type::kInt64: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003595 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003596 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003597 } else {
3598 __ movq(output_register, input_register);
3599 if (imm == -1) {
3600 __ negq(output_register);
3601 }
3602 }
3603 break;
3604 }
3605
3606 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003607 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003608 }
3609}
// Computes the remainder of a division by a power-of-two constant without
// idiv. The low bits are masked off; if the numerator is negative and the
// masked value is non-zero, the result is adjusted so it carries the
// numerator's sign.
void InstructionCodeGeneratorX86_64::RemByPowerOfTwo(HRem* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);
  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    NearLabel done;
    // out = numerator & (abs_imm - 1): already correct for non-negative input.
    __ movl(out, numerator);
    __ andl(out, Immediate(abs_imm-1));
    __ j(Condition::kZero, &done);
    // ~(abs_imm - 1) == -abs_imm, so tmp = out - abs_imm; selected below when
    // the numerator is negative.
    __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm-1))));
    __ testl(numerator, numerator);
    __ cmov(Condition::kLess, out, tmp, false);
    __ Bind(&done);

  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    // The 64-bit mask may not fit in an immediate, so materialize it in tmp.
    codegen_->Load64BitValue(tmp, abs_imm - 1);
    NearLabel done;

    __ movq(out, numerator);
    __ andq(out, tmp);
    __ j(Condition::kZero, &done);
    // Negative numerator with a non-zero masked value: OR in the sign mask
    // shifted by log2(abs_imm) so the result takes the numerator's sign.
    __ movq(tmp, numerator);
    __ sarq(tmp, Immediate(63));
    __ shlq(tmp, Immediate(WhichPowerOf2(abs_imm)));
    __ orq(out, tmp);
    __ Bind(&done);
  }
}
// Computes numerator / imm where |imm| is a power of two, using an arithmetic
// shift instead of idiv. Negative numerators are biased by abs_imm - 1 first
// so that the shift rounds toward zero; a negative divisor negates the result.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // When denominator is equal to 2, we can add signed bit and numerator to tmp.
    // Below we are using addl instruction instead of cmov which give us 1 cycle benefit.
    if (abs_imm == 2) {
      // tmp = numerator + (numerator >>> 31), i.e. bias by 1 only when negative.
      __ leal(tmp, Address(numerator, 0));
      __ shrl(tmp, Immediate(31));
      __ addl(tmp, numerator);
    } else {
      // tmp = numerator < 0 ? numerator + (abs_imm - 1) : numerator.
      __ leal(tmp, Address(numerator, abs_imm - 1));
      __ testl(numerator, numerator);
      __ cmov(kGreaterEqual, tmp, numerator);
    }
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      // Negative divisor: negate the quotient.
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
    if (abs_imm == 2) {
      // rdx = numerator + (numerator >>> 63): bias by 1 only when negative.
      __ movq(rdx, numerator);
      __ shrq(rdx, Immediate(63));
      __ addq(rdx, numerator);
    } else {
      // rdx = numerator < 0 ? numerator + (abs_imm - 1) : numerator.
      // The 64-bit bias may not fit in an immediate, so load it first.
      codegen_->Load64BitValue(rdx, abs_imm - 1);
      __ addq(rdx, numerator);
      __ testq(numerator, numerator);
      __ cmov(kGreaterEqual, rdx, numerator);
    }
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      // Negative divisor: negate the quotient.
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3700
// Emits div/rem by an arbitrary (non-trivial) constant using the
// multiply-by-magic-number technique: the magic and shift come from
// CalculateMagicAndShiftForDivRem, the quotient is derived from the high half
// of a widening multiply, and for rem the quotient is multiplied back and
// subtracted from the numerator. Requires the numerator in RAX; the quotient
// lands in RAX and the remainder in RDX, matching the location constraints.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // Temp register used to preserve the numerator across the RAX/RDX shuffling.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correct the high half depending on the signs of imm and magic.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round the quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm.
      __ movq(rax, numerator);

      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        // Immediate does not fit in 32 bits; read it from the constant area.
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3811
// Common emission for integral HDiv/HRem. Constant divisors are
// strength-reduced: nothing for 0 (DivZeroCheck handles it), move/negate for
// +/-1, shift/mask tricks for powers of two, multiply-by-magic otherwise.
// Non-constant divisors use idiv with a slow path guarding the
// MIN_VALUE / -1 overflow case.
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // idiv requires the dividend in RAX; the quotient comes back in RAX and
  // the remainder in RDX.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      if (is_div) {
        DivByPowerOfTwo(instruction->AsDiv());
      } else {
        RemByPowerOfTwo(instruction->AsRem());
      }
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == DataType::Type::kInt32) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
3871
Calin Juravle7c4954d2014-10-28 16:57:40 +00003872void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3873 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003874 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003875 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003876 case DataType::Type::kInt32:
3877 case DataType::Type::kInt64: {
Calin Juravled0d48522014-11-04 16:40:20 +00003878 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003879 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003880 locations->SetOut(Location::SameAsFirstInput());
3881 // Intel uses edx:eax as the dividend.
3882 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003883 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3884 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3885 // output and request another temp.
3886 if (div->InputAt(1)->IsConstant()) {
3887 locations->AddTemp(Location::RequiresRegister());
3888 }
Calin Juravled0d48522014-11-04 16:40:20 +00003889 break;
3890 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003891
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003892 case DataType::Type::kFloat32:
3893 case DataType::Type::kFloat64: {
Calin Juravle7c4954d2014-10-28 16:57:40 +00003894 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003895 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003896 locations->SetOut(Location::SameAsFirstInput());
3897 break;
3898 }
3899
3900 default:
3901 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3902 }
3903}
3904
// Emits code for HDiv. Integer cases share GenerateDivRemIntegral with HRem;
// FP cases emit divss/divsd with the divisor taken from a register, a
// RIP-relative constant literal, or a stack slot (matching the locations
// chosen in LocationsBuilderX86_64::VisitDiv).
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The output was registered as SameAsFirstInput, so `first` doubles as the result.
  DCHECK(first.Equals(locations->Out()));

  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateDivRemIntegral(div);
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor is materialized as a RIP-relative literal.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor is materialized as a RIP-relative literal.
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3953
Calin Juravlebacfec32014-11-14 15:54:36 +00003954void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003955 DataType::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003956 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003957 new (GetGraph()->GetAllocator()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003958
3959 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003960 case DataType::Type::kInt32:
3961 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003962 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003963 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003964 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3965 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003966 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3967 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3968 // output and request another temp.
3969 if (rem->InputAt(1)->IsConstant()) {
3970 locations->AddTemp(Location::RequiresRegister());
3971 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003972 break;
3973 }
3974
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003975 case DataType::Type::kFloat32:
3976 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003977 locations->SetInAt(0, Location::Any());
3978 locations->SetInAt(1, Location::Any());
3979 locations->SetOut(Location::RequiresFpuRegister());
3980 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003981 break;
3982 }
3983
3984 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003985 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003986 }
3987}
3988
3989void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003990 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00003991 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003992 case DataType::Type::kInt32:
3993 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003994 GenerateDivRemIntegral(rem);
3995 break;
3996 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003997 case DataType::Type::kFloat32:
3998 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003999 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00004000 break;
4001 }
Calin Juravlebacfec32014-11-14 15:54:36 +00004002 default:
4003 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
4004 }
4005}
4006
Aart Bik1f8d51b2018-02-15 10:42:37 -08004007static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
4008 LocationSummary* locations = new (allocator) LocationSummary(minmax);
4009 switch (minmax->GetResultType()) {
4010 case DataType::Type::kInt32:
4011 case DataType::Type::kInt64:
4012 locations->SetInAt(0, Location::RequiresRegister());
4013 locations->SetInAt(1, Location::RequiresRegister());
4014 locations->SetOut(Location::SameAsFirstInput());
4015 break;
4016 case DataType::Type::kFloat32:
4017 case DataType::Type::kFloat64:
4018 locations->SetInAt(0, Location::RequiresFpuRegister());
4019 locations->SetInAt(1, Location::RequiresFpuRegister());
4020 // The following is sub-optimal, but all we can do for now. It would be fine to also accept
4021 // the second input to be the output (we can simply swap inputs).
4022 locations->SetOut(Location::SameAsFirstInput());
4023 break;
4024 default:
4025 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
4026 }
4027}
4028
Aart Bik351df3e2018-03-07 11:54:57 -08004029void InstructionCodeGeneratorX86_64::GenerateMinMaxInt(LocationSummary* locations,
4030 bool is_min,
4031 DataType::Type type) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08004032 Location op1_loc = locations->InAt(0);
4033 Location op2_loc = locations->InAt(1);
4034
4035 // Shortcut for same input locations.
4036 if (op1_loc.Equals(op2_loc)) {
4037 // Can return immediately, as op1_loc == out_loc.
4038 // Note: if we ever support separate registers, e.g., output into memory, we need to check for
4039 // a copy here.
4040 DCHECK(locations->Out().Equals(op1_loc));
4041 return;
4042 }
4043
4044 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
4045 CpuRegister op2 = op2_loc.AsRegister<CpuRegister>();
4046
4047 // (out := op1)
4048 // out <=? op2
4049 // if out is min jmp done
4050 // out := op2
4051 // done:
4052
4053 if (type == DataType::Type::kInt64) {
4054 __ cmpq(out, op2);
4055 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ true);
4056 } else {
4057 DCHECK_EQ(type, DataType::Type::kInt32);
4058 __ cmpl(out, op2);
4059 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ false);
4060 }
4061}
4062
// Emits a floating-point min/max with Java semantics: NaN inputs produce the
// canonical NaN, and -0.0/+0.0 are distinguished (min picks -0.0, max picks
// +0.0) by OR-ing/AND-ing the raw bit patterns when the operands compare equal.
void InstructionCodeGeneratorX86_64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if Nan jmp Nan_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // Nan_label:
  // out := NaN
  // op2_label:
  // out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick. Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (type == DataType::Type::kFloat64) {
    __ ucomisd(out, op2);
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ ucomiss(out, op2);
  }

  // ucomis sets PF on an unordered comparison, i.e. when either operand is NaN.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0: the operands compared equal, so they can only differ in the
  // sign bit. OR keeps a set sign bit (-0.0 wins for min), AND clears it (+0.0
  // wins for max). For any other equal operands these ops are no-ops.
  if (is_min) {
    if (type == DataType::Type::kFloat64) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (type == DataType::Type::kFloat64) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling: load the canonical quiet NaN bit pattern.
  __ Bind(&nan);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, codegen_->LiteralInt64Address(INT64_C(0x7FF8000000000000)));
  } else {
    __ movss(out, codegen_->LiteralInt32Address(INT32_C(0x7FC00000)));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
4145
Aart Bik351df3e2018-03-07 11:54:57 -08004146void InstructionCodeGeneratorX86_64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4147 DataType::Type type = minmax->GetResultType();
4148 switch (type) {
4149 case DataType::Type::kInt32:
4150 case DataType::Type::kInt64:
4151 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4152 break;
4153 case DataType::Type::kFloat32:
4154 case DataType::Type::kFloat64:
4155 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4156 break;
4157 default:
4158 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4159 }
4160}
4161
Aart Bik1f8d51b2018-02-15 10:42:37 -08004162void LocationsBuilderX86_64::VisitMin(HMin* min) {
4163 CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
4164}
4165
// HMin shares its code generation with HMax; only the condition sense differs.
void InstructionCodeGeneratorX86_64::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}
4169
// HMax uses the locations shared with HMin (both inputs in registers, out == in(0)).
void LocationsBuilderX86_64::VisitMax(HMax* max) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}
4173
// HMax shares its code generation with HMin; only the condition sense differs.
void InstructionCodeGeneratorX86_64::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
4177
Aart Bik3dad3412018-02-28 12:01:46 -08004178void LocationsBuilderX86_64::VisitAbs(HAbs* abs) {
4179 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4180 switch (abs->GetResultType()) {
4181 case DataType::Type::kInt32:
4182 case DataType::Type::kInt64:
4183 locations->SetInAt(0, Location::RequiresRegister());
4184 locations->SetOut(Location::SameAsFirstInput());
4185 locations->AddTemp(Location::RequiresRegister());
4186 break;
4187 case DataType::Type::kFloat32:
4188 case DataType::Type::kFloat64:
4189 locations->SetInAt(0, Location::RequiresFpuRegister());
4190 locations->SetOut(Location::SameAsFirstInput());
4191 locations->AddTemp(Location::RequiresFpuRegister());
4192 break;
4193 default:
4194 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4195 }
4196}
4197
// Emits branch-free absolute value.
// Integers use the sign-mask identity abs(x) = (x + (x >> 31)) ^ (x >> 31)
// (the mask is 0 for non-negative x and -1 for negative x, in which case
// add-then-xor performs two's-complement negation). Floats simply clear the
// IEEE-754 sign bit by AND-ing with 0x7FF...F.
void InstructionCodeGeneratorX86_64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask: 0 if out >= 0, all ones if out < 0.
      __ movl(mask, out);
      __ sarl(mask, Immediate(31));
      // Add mask, then xor with it: identity for >= 0, negation for < 0.
      __ addl(out, mask);
      __ xorl(out, mask);
      break;
    }
    case DataType::Type::kInt64: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask: 0 if out >= 0, all ones if out < 0.
      __ movq(mask, out);
      __ sarq(mask, Immediate(63));
      // Add mask, then xor with it: identity for >= 0, negation for < 0.
      __ addq(out, mask);
      __ xorq(out, mask);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit (bit 31) of the single-precision value.
      __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x7FFFFFFF)));
      __ andps(out, mask);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit (bit 63) of the double-precision value.
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF)));
      __ andpd(out, mask);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4241
Calin Juravled0d48522014-11-04 16:40:20 +00004242void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004243 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00004244 locations->SetInAt(0, Location::Any());
Calin Juravled0d48522014-11-04 16:40:20 +00004245}
4246
// Emits the divide-by-zero check: jump to the ArithmeticException slow path
// when the divisor is zero. Non-zero constants compile to no code at all;
// a constant zero becomes an unconditional jump to the slow path.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    // All sub-int types are handled with 32-bit instructions.
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        // test reg, reg sets ZF without needing an immediate.
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Constant zero divisor: always throws.
          __ jmp(slow_path->GetEntryLabel());
        }
        // Non-zero constant: check statically proven, emit nothing.
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          // Constant zero divisor: always throws.
          __ jmp(slow_path->GetEntryLabel());
        }
        // Non-zero constant: check statically proven, emit nothing.
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
4295
Calin Juravle9aec02f2014-11-18 23:06:35 +00004296void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
4297 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4298
4299 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004300 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004301
4302 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004303 case DataType::Type::kInt32:
4304 case DataType::Type::kInt64: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00004305 locations->SetInAt(0, Location::RequiresRegister());
4306 // The shift count needs to be in CL.
4307 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
4308 locations->SetOut(Location::SameAsFirstInput());
4309 break;
4310 }
4311 default:
4312 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
4313 }
4314}
4315
// Shared code generation for Shl/Shr/UShr: shl/sar/shr with the count either
// in CL or as an immediate. Constant counts are masked to the type width
// (0-31 / 0-63) to match Java shift semantics, mirroring what the hardware
// does for the CL-register forms.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        // Shift count is in CL (guaranteed by ByteRegisterOrConstant(RCX, ...)).
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);  // Arithmetic shift preserves the sign.
        } else {
          __ shrl(first_reg, second_reg);  // Logical shift zero-fills.
        }
      } else {
        // Mask the constant count to [0, 31], as required by Java semantics.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        // Mask the constant count to [0, 63], as required by Java semantics.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
4373
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004374void LocationsBuilderX86_64::VisitRor(HRor* ror) {
4375 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004376 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004377
4378 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004379 case DataType::Type::kInt32:
4380 case DataType::Type::kInt64: {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004381 locations->SetInAt(0, Location::RequiresRegister());
4382 // The shift count needs to be in CL (unless it is a constant).
4383 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
4384 locations->SetOut(Location::SameAsFirstInput());
4385 break;
4386 }
4387 default:
4388 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4389 UNREACHABLE();
4390 }
4391}
4392
// Emits HRor as a single rorl/rorq, count in CL or as an immediate masked to
// the type width (0-31 / 0-63).
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt32:
      if (second.IsRegister()) {
        // Rotation count is in CL (guaranteed by ByteRegisterOrConstant(RCX, ...)).
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        // Mask the constant count to [0, 31].
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case DataType::Type::kInt64:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        // Mask the constant count to [0, 63].
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4422
Calin Juravle9aec02f2014-11-18 23:06:35 +00004423void LocationsBuilderX86_64::VisitShl(HShl* shl) {
4424 HandleShift(shl);
4425}
4426
// Shift-left codegen: delegates to the common shift handling (emits shll/shlq).
void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4430
// Arithmetic shift-right locations: delegates to the common shift handling.
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4434
// Arithmetic shift-right codegen: delegates to the common shift handling (emits sarl/sarq).
void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4438
// Logical shift-right locations: delegates to the common shift handling.
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4442
// Logical shift-right codegen: delegates to the common shift handling (emits shrl/shrq).
void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4446
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004447void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004448 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4449 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004450 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07004451 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004452 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004453}
4454
// Emits HNewInstance as a call to the allocation entrypoint chosen at graph
// build time (instruction->GetEntrypoint()).
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  // The runtime call makes this method a non-leaf.
  DCHECK(!codegen_->IsLeafMethod());
}
4460
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004461void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004462 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4463 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004464 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004465 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004466 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4467 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004468}
4469
// Emits HNewArray as a call to the appropriate array-allocation entrypoint.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  // The runtime call makes this method a non-leaf.
  DCHECK(!codegen_->IsLeafMethod());
}
4477
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004478void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004479 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004480 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004481 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4482 if (location.IsStackSlot()) {
4483 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4484 } else if (location.IsDoubleStackSlot()) {
4485 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4486 }
4487 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004488}
4489
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004490void InstructionCodeGeneratorX86_64::VisitParameterValue(
4491 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004492 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004493}
4494
// HCurrentMethod is always available in the dedicated method register
// (kMethodRegisterArgument), so the output is pinned there.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
4500
// No code is emitted: the method pointer already lives in its fixed register.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4505
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004506void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4507 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004508 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004509 locations->SetInAt(0, Location::RequiresRegister());
4510 locations->SetOut(Location::RequiresRegister());
4511}
4512
// Loads an ArtMethod* out of a class's dispatch tables. The vtable entry is
// embedded in the Class object (single load); the IMT lives behind a pointer
// in the Class, so the IMT case needs two loads.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable entries are embedded directly in the Class object.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    // IMT case: first load the IMT pointer, then index into the table.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4530
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004531void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004532 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004533 new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004534 locations->SetInAt(0, Location::RequiresRegister());
4535 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004536}
4537
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004538void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4539 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004540 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4541 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004542 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004543 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004544 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004545 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004546 break;
4547
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004548 case DataType::Type::kInt64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004549 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004550 break;
4551
4552 default:
4553 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4554 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004555}
4556
David Brazdil66d126e2015-04-03 16:02:44 +01004557void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4558 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004559 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
David Brazdil66d126e2015-04-03 16:02:44 +01004560 locations->SetInAt(0, Location::RequiresRegister());
4561 locations->SetOut(Location::SameAsFirstInput());
4562}
4563
4564void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004565 LocationSummary* locations = bool_not->GetLocations();
4566 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4567 locations->Out().AsRegister<CpuRegister>().AsRegister());
4568 Location out = locations->Out();
4569 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4570}
4571
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004572void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004573 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004574 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004575 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004576 locations->SetInAt(i, Location::Any());
4577 }
4578 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004579}
4580
// Phis are resolved before code generation; reaching this visitor is a compiler bug.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4584
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004585void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004586 /*
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004587 * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004588 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004589 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4590 */
4591 switch (kind) {
4592 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004593 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004594 break;
4595 }
4596 case MemBarrierKind::kAnyStore:
4597 case MemBarrierKind::kLoadAny:
4598 case MemBarrierKind::kStoreStore: {
4599 // nop
4600 break;
4601 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004602 case MemBarrierKind::kNTStoreStore:
4603 // Non-Temporal Store/Store needs an explicit fence.
Andreas Gampe3db70682018-12-26 15:12:03 -08004604 MemoryFence(/* non-temporal= */ true);
Mark Mendell7aa04a12016-01-27 22:39:07 -05004605 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004606 }
4607}
4608
// Sets up register constraints for instance/static field loads. Reference
// loads under a read barrier may branch to a slow path, which changes the
// call kind and the output-overlap policy below.
void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // The Baker read barrier slow path does not clobber caller-saves.
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0 is the object (or class, for statics) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the move to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
4634
// Emits the load for an instance/static field get. The per-type switch picks
// the correctly sized/extended move; reference loads additionally thread in
// read-barrier and implicit-null-check handling, which is why the
// null-check and memory-barrier emission after the switch is conditional on
// the load type (references handle both inside the switch).
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (load_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      // Zero-extending byte load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt8: {
      // Sign-extending byte load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint16: {
      // Zero-extending 16-bit load.
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt16: {
      // Sign-extending 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt32: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kReference: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check= */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case DataType::Type::kInt64: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat32: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat64: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << load_type;
      UNREACHABLE();
  }

  if (load_type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    // Must be recorded immediately after the (possibly faulting) load above.
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (load_type == DataType::Type::kReference) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4737
// Sets up register constraints for instance/static field stores. Volatile
// stores restrict constants to ones encodable in a single store instruction;
// reference stores may need temporaries for the GC write barrier and/or heap
// reference poisoning.
void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  DataType::Type field_type = field_info.GetFieldType();
  bool is_volatile = field_info.IsVolatile();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));

  // Input 0: the object (or class) holding the field; input 1: the value.
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
    }
  } else {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    }
  }
  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  } else if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
    // Temporary register for the reference poisoning.
    locations->AddTemp(Location::RequiresRegister());
  }
}
4774
// Emits the store for an instance/static field set, including the volatile
// pre/post barriers, correctly sized moves per type, heap reference
// poisoning, the implicit null check record, and the GC card mark.
// `value_can_be_null` is forwarded to MarkGCCard so it can skip the null
// filter when the compiler proved the stored value non-null.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  DataType::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Order earlier accesses before the volatile store.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // MoveInt64ToAddress records the implicit null check itself when it splits
  // the store; this flag suppresses the duplicate record below.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      if (value.IsConstant()) {
        __ movb(Address(base, offset),
                Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      if (value.IsConstant()) {
        __ movw(Address(base, offset),
                Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == DataType::Type::kReference` implies `v == 0`.
        DCHECK((field_type != DataType::Type::kReference) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
          // Poison a copy in a temp so the original value register survives.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    // Order the volatile store before any later accesses.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4900
// Delegates to the field-store location setup shared with static field sets.
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4904
// Delegates to the shared field-store emitter, forwarding the value's nullability.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4908
// Delegates to the field-load location setup shared with static field gets.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4912
// Delegates to the shared field-load emitter.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004916
// Delegates to the field-load location setup shared with instance field gets.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004920
// Delegates to the shared field-load emitter.
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004924
// Delegates to the field-store location setup shared with instance field sets.
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004928
// Delegates to the shared field-store emitter, forwarding the value's nullability.
void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4932
// StringBuilder.append() fast path: shared location setup; the resulting
// String is returned in RAX.
void LocationsBuilderX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(RAX));
}
4936
// Loads the append format descriptor into RDI, then calls the
// StringBuilderAppend runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  __ movl(CpuRegister(RDI), Immediate(instruction->GetFormat()->GetValue()));
  codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
}
4941
// Unresolved field accesses go through a runtime call; use the field-access
// calling convention to place the operands.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4948
// Emits the runtime call that performs the unresolved instance field load.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4958
// Unresolved field accesses go through a runtime call; use the field-access
// calling convention to place the operands.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4965
// Emits the runtime call that performs the unresolved instance field store.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4975
// Unresolved field accesses go through a runtime call; use the field-access
// calling convention to place the operands.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4982
// Emits the runtime call that performs the unresolved static field load.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4992
// Unresolved field accesses go through a runtime call; use the field-access
// calling convention to place the operands.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4999
// Emits the runtime call that performs the unresolved static field store.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5009
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005010void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005011 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5012 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5013 ? Location::RequiresRegister()
5014 : Location::Any();
5015 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005016}
5017
// Implicit null check: read through the object pointer so a null object
// faults, and record the PC so the runtime can map the fault back to this
// instruction. Skipped entirely when a subsequent user's own access can
// serve as the check.
void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  // testl only loads from [obj + 0]; the destination register's value is
  // irrelevant (RAX is arbitrary) and no register is modified.
  __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
  RecordPcInfo(instruction, instruction->GetDexPc());
}
5028
// Explicit null check: compare the object against null and branch to a
// throwing slow path when it is null. Handles the object being in a
// register, on the stack, or a (necessarily null) constant.
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // test reg, reg sets ZF iff the register is zero (null).
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // A constant object here can only be the null constant: always throw.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
5048
// Dispatches to the implicit or explicit null-check generator in the codegen.
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
5052
// Sets up register constraints for array loads. Mirrors HandleFieldGet:
// object-array loads under a read barrier may take a slow path, and their
// output must not clobber the array register before the barrier runs.
void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // The Baker read barrier slow path does not clobber caller-saves.
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0: the array; input 1: the index (register or constant).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps for an object array get when read barriers
    // are enabled: we do not want the move to overwrite the array's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
5077
5078void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
5079 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005080 Location obj_loc = locations->InAt(0);
5081 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005082 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005083 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01005084 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005085
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005086 DataType::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01005087 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005088 case DataType::Type::kBool:
5089 case DataType::Type::kUint8: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005090 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005091 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005092 break;
5093 }
5094
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005095 case DataType::Type::kInt8: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005096 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005097 __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005098 break;
5099 }
5100
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005101 case DataType::Type::kUint16: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005102 CpuRegister out = out_loc.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07005103 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5104 // Branch cases into compressed and uncompressed for each index's type.
5105 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
5106 NearLabel done, not_compressed;
Vladimir Marko3c89d422017-02-17 11:30:23 +00005107 __ testb(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005108 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005109 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
5110 "Expecting 0=compressed, 1=uncompressed");
5111 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07005112 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
5113 __ jmp(&done);
5114 __ Bind(&not_compressed);
5115 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
5116 __ Bind(&done);
5117 } else {
5118 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
5119 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005120 break;
5121 }
5122
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005123 case DataType::Type::kInt16: {
5124 CpuRegister out = out_loc.AsRegister<CpuRegister>();
5125 __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
5126 break;
5127 }
5128
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005129 case DataType::Type::kInt32: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005130 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005131 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005132 break;
5133 }
5134
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005135 case DataType::Type::kReference: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005136 static_assert(
5137 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5138 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005139 // /* HeapReference<Object> */ out =
5140 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
5141 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005142 // Note that a potential implicit null check is handled in this
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005143 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005144 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08005145 instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005146 } else {
5147 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005148 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
5149 codegen_->MaybeRecordImplicitNullCheck(instruction);
5150 // If read barriers are enabled, emit read barriers other than
5151 // Baker's using a slow path (and also unpoison the loaded
5152 // reference, if heap poisoning is enabled).
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005153 if (index.IsConstant()) {
5154 uint32_t offset =
5155 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005156 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
5157 } else {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005158 codegen_->MaybeGenerateReadBarrierSlow(
5159 instruction, out_loc, out_loc, obj_loc, data_offset, index);
5160 }
5161 }
5162 break;
5163 }
5164
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005165 case DataType::Type::kInt64: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005166 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005167 __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005168 break;
5169 }
5170
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005171 case DataType::Type::kFloat32: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005172 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005173 __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005174 break;
5175 }
5176
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005177 case DataType::Type::kFloat64: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005178 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005179 __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005180 break;
5181 }
5182
Aart Bik66c158e2018-01-31 12:55:04 -08005183 case DataType::Type::kUint32:
5184 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005185 case DataType::Type::kVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01005186 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005187 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005188 }
Roland Levillain4d027112015-07-01 15:41:14 +01005189
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005190 if (type == DataType::Type::kReference) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005191 // Potential implicit null checks, in the case of reference
5192 // arrays, are handled in the previous switch statement.
5193 } else {
5194 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01005195 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005196}
5197
// Allocates registers/locations for an array store. Mirrors the code paths in
// InstructionCodeGeneratorX86_64::VisitArraySet below: a type check requires a
// slow path, a write barrier requires two temporaries.
void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_type_check = instruction->NeedsTypeCheck();

  // kCallOnSlowPath because a failing component-type check dispatches to an
  // ArraySetSlowPathX86_64 (see the codegen visitor).
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);

  // Input 0: array reference; input 1: index (register or constant);
  // input 2: value to store, in an FP or core register depending on type.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
  }

  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  }
}
5223
// Emits the code for an array store (array[index] = value), dispatching on the
// component type. For reference stores this also emits the optional null-skip,
// the component-type check (slow path on failure) and the GC card mark; for all
// types it records an implicit null check on the first memory access to the
// array when legal.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool needs_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      // 8-bit store; constants are narrowed to their int8 representation.
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      // 16-bit store.
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // Storing null needs neither a write barrier nor a type check.
        DCHECK(!needs_write_barrier);
        DCHECK(!needs_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();

      // A null value skips both the type check and the card mark; jump
      // straight to the store.
      bool can_value_be_null = instruction->GetValueCanBeNull();
      NearLabel do_store;
      if (can_value_be_null) {
        __ testl(register_value, register_value);
        __ j(kEqual, &do_store);
      }

      SlowPathCode* slow_path = nullptr;
      if (needs_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);

        const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
        const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
        const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Exact-class mismatch may still be a valid store into Object[]
          // if the value's class has Object as its direct superclass.
          NearLabel do_put;
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // Mark the GC card before the store; the null case was already branched
      // past this point, so `value_can_be_null` is false here.
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), /* value_can_be_null= */ false);

      if (can_value_be_null) {
        DCHECK(do_store.IsLinked());
        __ Bind(&do_store);
      }

      // With heap poisoning, store a poisoned copy of the value (kept in
      // `temp`) so `register_value` itself stays usable.
      Location source = value;
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        source = temp_loc;
      }

      __ movl(address, source.AsRegister<CpuRegister>());

      // If a type check was emitted, the class load above already recorded the
      // implicit null check unless it was skipped via the null-value branch.
      if (can_value_be_null || !needs_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // 64-bit immediates may not fit in a single mov; MoveInt64ToAddress
        // handles splitting and also records the implicit null check.
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        // Store the constant's bit pattern via an integer move.
        DCHECK(value.IsConstant());
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // As for kInt64: split the 64-bit constant store if necessary.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5433
5434void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005435 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005436 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005437 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005438 if (!instruction->IsEmittedAtUseSite()) {
5439 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5440 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005441}
5442
// Emits the load of an array's length field. When the length is emitted at its
// use site (e.g. folded into a bounds check), nothing is generated here.
void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  // The load itself doubles as the null check on `obj`.
  __ movl(out, Address(obj, offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out most significant bit in case the array is String's array of char.
  // (With string compression, the low bit of `count` is the compression flag.)
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ shrl(out, Immediate(1));
  }
}
5459
// Allocates locations for a bounds check. Only the two registers used by the
// throwing slow path's runtime calling convention need to be saved across it.
void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConvention calling_convention;
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  // If the length (an HArrayLength) is folded into this check, it is read
  // straight from memory and needs no input location of its own.
  HInstruction* length = instruction->InputAt(1);
  if (!length->IsEmittedAtUseSite()) {
    locations->SetInAt(1, Location::RegisterOrConstant(length));
  }
}
5472
// Emits the bounds check `0 <= index < length`, jumping to a throwing
// BoundsCheckSlowPathX86_64 on failure. Exploits constant operands, and can
// compare the index directly against an array length still in memory.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically out of bounds: always throw.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    // Unsigned `index >= length` also catches negative indices.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        // Drop the compression-flag bit to recover the character count.
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // Comparison was length vs index, so `length <= index` (unsigned) throws.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5534
// Marks the GC card table entry for `object` after a reference store into it.
// `temp` and `card` are scratch registers clobbered here. When
// `value_can_be_null` is true, the marking is skipped entirely for a null
// `value` (no reference was actually stored).
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the address of the card table into `card`.
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip= */ true));
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5570
// Parallel moves are materialized by the register allocator after location
// assignment, so they never pass through the locations builder.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5574
// Emits a parallel move via the move resolver. If the move immediately
// precedes a loop back-edge suspend check, first clear the loop-phi spill
// slots from that suspend check's stack map.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
5585
// Allocates locations for a suspend check, which always carries a slow path.
void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
5595
5596void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005597 HBasicBlock* block = instruction->GetBlock();
5598 if (block->GetLoopInformation() != nullptr) {
5599 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5600 // The back edge will generate the suspend check.
5601 return;
5602 }
5603 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5604 // The goto will generate the suspend check.
5605 return;
5606 }
5607 GenerateSuspendCheck(instruction, nullptr);
5608}
5609
// Emits the actual suspend-check test: compares the current thread's flags
// (read through the GS segment) against zero and enters the slow path when
// any flag is set. With `successor == nullptr` control falls back to the
// point after the check; otherwise the fast path jumps to `successor` (a loop
// header) and the slow path is entered unconditionally on the other arm.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse a previously created slow path for this instruction if one exists.
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // The cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip= */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Slow path returns to the instruction right after the check.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Fast path: no flags set, continue at the successor block.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5637
// Returns the code generator's assembler; used by the `__` emission macro below.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5641
// Emits one move of the parallel-move list: dispatches on the (source,
// destination) location kinds and picks the matching 32/64/128-bit transfer,
// going through the TMP core register for memory-to-memory moves.
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register source: full 64-bit move to a register, 32-bit store to a
    // stack slot, 64-bit store to a double stack slot.
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot source; stack-to-stack goes through TMP.
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot source; stack-to-stack goes through TMP.
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsSIMDStackSlot()) {
    // 128-bit SIMD slot source: unaligned vector load into an XMM register, or
    // two qword copies through TMP for slot-to-slot.
    if (destination.IsFpuRegister()) {
      __ movups(destination.AsFpuRegister<XmmRegister>(),
                Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      size_t high = kX86_64WordSize;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex() + high));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex() + high), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    // Constant source: materialize the value directly into the destination.
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor is a shorter encoding for zeroing a register.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the raw bit pattern of the float.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    // FP register source: register move, or store of the width implied by the
    // destination slot kind (32-bit, 64-bit, or 128-bit SIMD).
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsDoubleStackSlot()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
                source.AsFpuRegister<XmmRegister>());
    }
  }
  // NOTE(review): unlike EmitSwap, there is no trailing `else LOG(FATAL)` here,
  // so an unhandled source kind would silently emit nothing — confirm all
  // source kinds are covered by the branches above.
}
5754
// Swaps a 32-bit value between a core register and a stack slot, using TMP as
// the intermediary.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
5760
// Swaps two core registers (64-bit) through TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5766
// Swaps a 64-bit value between a core register and a stack slot, using TMP as
// the intermediary.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
5772
// Swaps a 32-bit value between an XMM register and a stack slot, using the TMP
// core register as the intermediary.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5778
// Swaps a 64-bit value between an XMM register and a stack slot, using the TMP
// core register as the intermediary.
// NOTE(review): relies on the assembler's movd(XmmRegister, CpuRegister)
// overload transferring the full 64 bits here — confirm against the assembler.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5784
// Swaps a 128-bit value between an XMM register and a SIMD stack slot. A
// 16-byte scratch area is carved out below RSP to hold the register contents,
// then two qwords are exchanged with the (now shifted) memory operand.
void ParallelMoveResolverX86_64::Exchange128(XmmRegister reg, int mem) {
  size_t extra_slot = 2 * kX86_64WordSize;
  __ subq(CpuRegister(RSP), Immediate(extra_slot));
  // Spill the register to the scratch area at the top of the stack.
  __ movups(Address(CpuRegister(RSP), 0), XmmRegister(reg));
  // `mem` must be rebased by `extra_slot` since RSP moved.
  ExchangeMemory64(0, mem + extra_slot, 2);
  // Reload the (swapped-in) value and release the scratch area.
  __ movups(XmmRegister(reg), Address(CpuRegister(RSP), 0));
  __ addq(CpuRegister(RSP), Immediate(extra_slot));
}
5793
// Swaps two 32-bit stack slots using TMP plus one scratch core register. If the
// scratch register had to be spilled (pushed), the slot offsets are rebased by
// one word to account for the moved RSP.
void ParallelMoveResolverX86_64::ExchangeMemory32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5806
// Swaps `num_of_qwords` consecutive 64-bit words between two stack regions,
// one qword per iteration, using TMP plus one scratch core register. As in
// ExchangeMemory32, offsets are rebased by one word if the scratch register
// was spilled.
void ParallelMoveResolverX86_64::ExchangeMemory64(int mem1, int mem2, int num_of_qwords) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;

  // Now that temp registers are available (possibly spilled), exchange blocks of memory.
  for (int i = 0; i < num_of_qwords; i++) {
    __ movq(CpuRegister(TMP),
            Address(CpuRegister(RSP), mem1 + stack_offset));
    __ movq(CpuRegister(ensure_scratch.GetRegister()),
            Address(CpuRegister(RSP), mem2 + stack_offset));
    __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),
            CpuRegister(TMP));
    __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
            CpuRegister(ensure_scratch.GetRegister()));
    // Advance to the next qword of both regions.
    stack_offset += kX86_64WordSize;
  }
}
5826
// Emits one swap of the parallel-move list: dispatches on the (source,
// destination) location kinds to the matching exchange helper. Unsupported
// combinations are a fatal error.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    ExchangeMemory32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 1);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM swap through the TMP core register (only the low 64 bits of
    // each XMM register are exchanged via movd/movaps here).
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
    // 128-bit slot <-> slot swap: two qwords.
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 2);
  } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
    Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
    Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5868
5869
// Spills a scratch core register by pushing it on the stack.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5873
5874
// Restores a previously spilled scratch core register by popping it.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5878
// Emits a class-initialization check: compares the byte of the class status
// word that holds the ClassStatus against the shifted kVisiblyInitialized
// value and enters `slow_path` if the class status is below it (i.e. not yet
// visibly initialized).
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  // The status value starts after the SubtypeCheckBits bitfield; compute the
  // byte offset and the in-byte shift of the status within the status word.
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_visibly_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << (status_lsb_position % kBitsPerByte);

  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_visibly_initialized_value));
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5891
// Emits the comparison for a bitstring-based type check. On exit the zero flag
// is set iff the masked bitstring in the class's status word equals the
// check's path-to-root; the caller branches on that flag.
void InstructionCodeGeneratorX86_64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       CpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Compare the bitstring in memory.
    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
    // Compare the bitstring bits using SUB.
    __ subl(temp, Immediate(path_to_root));
    // Shift out bits that do not contribute to the comparison.
    __ shll(temp, Immediate(32u - mask_bits));
  }
}
5911
// Returns the class load kind this backend will use for the desired kind.
// x86-64 supports every kind unchanged; the switch only sanity-checks that the
// kind is consistent with the compilation mode (AOT vs. JIT).
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
      // These PC-relative kinds are only valid for AOT compilation.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      // These direct-address kinds are only valid when JIT compiling.
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}
5934
// Allocates locations for HLoadClass, depending on its load kind and on
// whether a read barrier / slow path is needed.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // kReferrersClass reads the declaring class out of the current ArtMethod.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
5969
// Reserves a JIT root-table slot for the given class and records a patch entry
// for it; returns the label the generated code uses to reference the slot.
Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
                                                 dex::TypeIndex type_index,
                                                 Handle<mirror::Class> handle) {
  ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
  // Add a patch entry and return the label.
  jit_class_patches_.emplace_back(&dex_file, type_index.index_);
  PatchInfo<Label>* info = &jit_class_patches_.back();
  return &info->label;
}
5979
// Generates code for HLoadClass: loads a class reference into the output
// register using the strategy selected by the load kind, and attaches a slow
// path when a BSS null check and/or a clinit check is required.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image references do not need a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label= */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      // PC-relative address materialized via lea; the dummy offset is fixed up
      // at link time through the recorded patch.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageRelRo: {
      // Load the reference from the boot image .data.bimg.rel.ro entry.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(cls));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load from the .bss entry; a null result means the class is not yet
      // resolved and the slow path must resolve it.
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // No need for memory fence, thanks to the x86-64 memory model.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      // JIT: the boot-image class address is known now; embed it directly.
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // JIT: load the class root from the JIT root table slot reserved above.
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      // The clinit check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6072
// Allocates locations for HClinitCheck: class in a register, slow-path call
// possible, and (if used) the result aliases the input.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
6083
// Allocates locations for HLoadMethodHandle, which is always a runtime call.
void LocationsBuilderX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  // Custom calling convention: RAX serves as both input and output.
  Location location = Location::RegisterLocation(RAX);
  CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
}
6089
// Generates HLoadMethodHandle by delegating to the shared runtime-call helper.
void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6093
// Allocates locations for HLoadMethodType, which is always a runtime call.
void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  // Custom calling convention: RAX serves as both input and output.
  Location location = Location::RegisterLocation(RAX);
  CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
}
6099
// Generates HLoadMethodType by delegating to the shared runtime-call helper.
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6103
// Generates HClinitCheck: creates the class-loading slow path and emits the
// initialization-status check against the class register.
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(check->GetLoadClass(), check);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}
6112
// x86-64 supports every HLoadString load kind; this only sanity-checks that the
// requested kind matches the compilation mode (AOT-only vs JIT-only kinds) and
// returns the desired kind unchanged.
HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageRelRo:
    case HLoadString::LoadKind::kBssEntry:
      // PC-relative kinds are produced by the AOT compiler only.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kJitBootImageAddress:
    case HLoadString::LoadKind::kJitTableAddress:
      // Direct-address kinds rely on JIT roots / known boot image addresses.
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kRuntimeCall:
      // Generic fallback; always supported.
      break;
  }
  return desired_string_load_kind;
}
6130
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006131void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006132 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006133 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006134 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07006135 locations->SetOut(Location::RegisterLocation(RAX));
6136 } else {
6137 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006138 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
6139 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00006140 // Rely on the pResolveString to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006141 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006142 } else {
6143 // For non-Baker read barrier we have a temp-clobbering call.
6144 }
6145 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006146 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006147}
6148
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006149Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006150 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006151 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006152 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006153 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006154 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006155 PatchInfo<Label>* info = &jit_string_patches_.back();
6156 return &info->label;
6157}
6158
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
// Emits the code that materializes a java.lang.String reference into the output
// register, dispatching on the load kind chosen at location-build time. Each fast
// case returns directly; falling out of the switch emits the generic runtime call.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // AOT (boot image): PC-relative LEA whose displacement is patched at link time.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageStringPatch(load);
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      // AOT (app referencing boot image): load the address from the .data.bimg.rel.ro section.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(load));
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      // AOT: load from the .bss entry; if still null, resolve via the slow path.
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::String> */ out = *address  /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // No need for memory fence, thanks to the x86-64 memory model.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitBootImageAddress: {
      // JIT: the string lives in the boot image at a known 32-bit address.
      uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      // JIT: load the string from the JIT root table (RIP-relative slot).
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6221
David Brazdilcb1c0552015-08-04 16:22:25 +01006222static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006223 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Andreas Gampe3db70682018-12-26 15:12:03 -08006224 /* no_rip= */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01006225}
6226
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006227void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
6228 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006229 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006230 locations->SetOut(Location::RequiresRegister());
6231}
6232
6233void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01006234 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
6235}
6236
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  // No inputs, no outputs and no runtime call; only a summary needs to exist.
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6240
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store null into the thread-local pending exception field via the GS segment.
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
6244
6245void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006246 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6247 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006248 InvokeRuntimeCallingConvention calling_convention;
6249 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6250}
6251
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  // Hand the exception object to the runtime; pDeliverException does not return.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6256
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006257// Temp is used for read barrier.
6258static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6259 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006260 !kUseBakerReadBarrier &&
6261 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006262 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006263 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6264 return 1;
6265 }
6266 return 0;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006267}
6268
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006269// Interface case has 2 temps, one for holding the number of interfaces, one for the current
6270// interface pointer, the current interface is compared in memory.
6271// The other checks have one temp for loading the object's class.
6272static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6273 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6274 return 2;
6275 }
6276 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006277}
6278
// Builds the register constraints for an HInstanceOf. Input 0 is the object; input 1
// is the class to test against (register/stack/constants depending on the kind of
// check). The output register doubles as scratch inside the generated sequence.
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      // These kinds only need a slow path when a read barrier is required.
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // These kinds always go through a type-checking slow path.
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // The bitstring path/mask inputs are compile-time constants.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
6319
// Emits the InstanceOf check: sets `out` to 1 if `obj` is an instance of the class in
// `cls`, 0 otherwise. Depending on the TypeCheckKind this is a single class compare,
// a loop up the superclass chain, a component-type check, a bitstring compare, or a
// jump to a type-checking slow path. The `zero`/`done` labels are only bound when
// some emitted branch actually linked to them.
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1u) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // Single comparison of the object's class against `cls`.
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // Walk up the superclass chain until `cls` is found or null is reached.
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Compare at each level of the hierarchy, then fetch the superclass.
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Either an exact match, or the class must be a non-primitive array.
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, out);
      if (zero.IsLinked()) {
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ xorl(out, out);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
6575
// Builds the register constraints for a CheckCast. Input 0 is the object; input 1 is
// the target class (register for the interface loop, constants for the bitstring
// check, otherwise any location). Temps are reserved per NumberOfCheckCastTemps().
void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
    // Require a register for the interface check since there is a loop that compares the class to
    // a memory address.
    locations->SetInAt(1, Location::RequiresRegister());
  } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // The bitstring path/mask inputs are compile-time constants.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86_64.
  locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
}
6596
6597void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006598 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006599 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006600 Location obj_loc = locations->InAt(0);
6601 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006602 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006603 Location temp_loc = locations->GetTemp(0);
6604 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006605 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
6606 DCHECK_GE(num_temps, 1u);
6607 DCHECK_LE(num_temps, 2u);
6608 Location maybe_temp2_loc = (num_temps >= 2u) ? locations->GetTemp(1) : Location::NoLocation();
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006609 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6610 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6611 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6612 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6613 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6614 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006615 const uint32_t object_array_data_offset =
6616 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006617
Vladimir Marko87584542017-12-12 17:47:52 +00006618 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006619 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006620 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6621 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006622 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006623
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006624
6625 NearLabel done;
6626 // Avoid null check if we know obj is not null.
6627 if (instruction->MustDoNullCheck()) {
6628 __ testl(obj, obj);
6629 __ j(kEqual, &done);
6630 }
6631
Roland Levillain0d5a2812015-11-13 10:07:31 +00006632 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006633 case TypeCheckKind::kExactCheck:
6634 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006635 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006636 GenerateReferenceLoadTwoRegisters(instruction,
6637 temp_loc,
6638 obj_loc,
6639 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006640 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006641 if (cls.IsRegister()) {
6642 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6643 } else {
6644 DCHECK(cls.IsStackSlot()) << cls;
6645 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6646 }
6647 // Jump to slow path for throwing the exception or doing a
6648 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006649 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006650 break;
6651 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006652
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006653 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006654 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006655 GenerateReferenceLoadTwoRegisters(instruction,
6656 temp_loc,
6657 obj_loc,
6658 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006659 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006660 // If the class is abstract, we eagerly fetch the super class of the
6661 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006662 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006663 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006664 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006665 GenerateReferenceLoadOneRegister(instruction,
6666 temp_loc,
6667 super_offset,
6668 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006669 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006670
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006671 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6672 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006673 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006674 // Otherwise, compare the classes.
6675 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006676 if (cls.IsRegister()) {
6677 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6678 } else {
6679 DCHECK(cls.IsStackSlot()) << cls;
6680 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6681 }
6682 __ j(kNotEqual, &loop);
6683 break;
6684 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006685
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006686 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006687 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006688 GenerateReferenceLoadTwoRegisters(instruction,
6689 temp_loc,
6690 obj_loc,
6691 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006692 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006693 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006694 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006695 __ Bind(&loop);
6696 if (cls.IsRegister()) {
6697 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6698 } else {
6699 DCHECK(cls.IsStackSlot()) << cls;
6700 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6701 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006702 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006703
Roland Levillain0d5a2812015-11-13 10:07:31 +00006704 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006705 GenerateReferenceLoadOneRegister(instruction,
6706 temp_loc,
6707 super_offset,
6708 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006709 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006710
6711 // If the class reference currently in `temp` is not null, jump
6712 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006713 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006714 __ j(kNotZero, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006715 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006716 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006717 break;
6718 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006719
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006720 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006721 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006722 GenerateReferenceLoadTwoRegisters(instruction,
6723 temp_loc,
6724 obj_loc,
6725 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006726 kWithoutReadBarrier);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006727 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006728 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006729 if (cls.IsRegister()) {
6730 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6731 } else {
6732 DCHECK(cls.IsStackSlot()) << cls;
6733 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6734 }
6735 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006736
6737 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006738 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006739 GenerateReferenceLoadOneRegister(instruction,
6740 temp_loc,
6741 component_offset,
6742 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006743 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006744
6745 // If the component type is not null (i.e. the object is indeed
6746 // an array), jump to label `check_non_primitive_component_type`
6747 // to further check that this component type is not a primitive
6748 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006749 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006750 // Otherwise, jump to the slow path to throw the exception.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006751 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006752 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006753 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006754 break;
6755 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006756
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006757 case TypeCheckKind::kUnresolvedCheck: {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006758 // We always go into the type check slow path for the unresolved case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006759 //
6760 // We cannot directly call the CheckCast runtime entry point
6761 // without resorting to a type checking slow path here (i.e. by
6762 // calling InvokeRuntime directly), as it would require to
6763 // assign fixed registers for the inputs of this HInstanceOf
6764 // instruction (following the runtime calling convention), which
6765 // might be cluttered by the potential first read barrier
6766 // emission at the beginning of this method.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006767 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006768 break;
6769 }
6770
Vladimir Marko175e7862018-03-27 09:03:13 +00006771 case TypeCheckKind::kInterfaceCheck: {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006772 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
6773 // We can not get false positives by doing this.
6774 // /* HeapReference<Class> */ temp = obj->klass_
6775 GenerateReferenceLoadTwoRegisters(instruction,
6776 temp_loc,
6777 obj_loc,
6778 class_offset,
6779 kWithoutReadBarrier);
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006780
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006781 // /* HeapReference<Class> */ temp = temp->iftable_
6782 GenerateReferenceLoadTwoRegisters(instruction,
6783 temp_loc,
6784 temp_loc,
6785 iftable_offset,
6786 kWithoutReadBarrier);
6787 // Iftable is never null.
6788 __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
6789 // Maybe poison the `cls` for direct comparison with memory.
6790 __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
6791 // Loop through the iftable and check if any class matches.
6792 NearLabel start_loop;
6793 __ Bind(&start_loop);
6794 // Need to subtract first to handle the empty array case.
6795 __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
6796 __ j(kNegative, type_check_slow_path->GetEntryLabel());
6797 // Go to next interface if the classes do not match.
6798 __ cmpl(cls.AsRegister<CpuRegister>(),
6799 CodeGeneratorX86_64::ArrayAddress(temp,
6800 maybe_temp2_loc,
6801 TIMES_4,
6802 object_array_data_offset));
6803 __ j(kNotEqual, &start_loop); // Return if same class.
6804 // If `cls` was poisoned above, unpoison it.
6805 __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006806 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006807 }
6808
6809 case TypeCheckKind::kBitstringCheck: {
6810 // /* HeapReference<Class> */ temp = obj->klass_
6811 GenerateReferenceLoadTwoRegisters(instruction,
6812 temp_loc,
6813 obj_loc,
6814 class_offset,
6815 kWithoutReadBarrier);
6816
6817 GenerateBitstringTypeCheckCompare(instruction, temp);
6818 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
6819 break;
6820 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006821 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006822
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006823 if (done.IsLinked()) {
6824 __ Bind(&done);
6825 }
6826
Roland Levillain0d5a2812015-11-13 10:07:31 +00006827 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006828}
6829
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006830void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006831 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6832 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006833 InvokeRuntimeCallingConvention calling_convention;
6834 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6835}
6836
6837void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006838 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006839 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006840 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006841 if (instruction->IsEnter()) {
6842 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6843 } else {
6844 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6845 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006846}
6847
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05306848void LocationsBuilderX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6849 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6850 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6851 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6852 locations->SetInAt(0, Location::RequiresRegister());
6853 // There is no immediate variant of negated bitwise and in X86.
6854 locations->SetInAt(1, Location::RequiresRegister());
6855 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6856}
6857
6858void LocationsBuilderX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6859 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6860 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6861 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6862 locations->SetInAt(0, Location::RequiresRegister());
6863 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6864}
6865
6866void InstructionCodeGeneratorX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6867 LocationSummary* locations = instruction->GetLocations();
6868 Location first = locations->InAt(0);
6869 Location second = locations->InAt(1);
6870 Location dest = locations->Out();
6871 __ andn(dest.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
6872}
6873
6874void InstructionCodeGeneratorX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6875 LocationSummary* locations = instruction->GetLocations();
6876 Location src = locations->InAt(0);
6877 Location dest = locations->Out();
6878 switch (instruction->GetOpKind()) {
6879 case HInstruction::kAnd:
6880 __ blsr(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6881 break;
6882 case HInstruction::kXor:
6883 __ blsmsk(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6884 break;
6885 default:
6886 LOG(FATAL) << "Unreachable";
6887 }
6888}
6889
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006890void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6891void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6892void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6893
6894void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6895 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006896 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006897 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
6898 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006899 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006900 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006901 locations->SetOut(Location::SameAsFirstInput());
6902}
6903
6904void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6905 HandleBitwiseOperation(instruction);
6906}
6907
6908void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6909 HandleBitwiseOperation(instruction);
6910}
6911
6912void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6913 HandleBitwiseOperation(instruction);
6914}
6915
// Emits x86-64 code for a bitwise AND/OR/XOR. The first input is also the
// destination (the locations builder requested Out == SameAsFirstInput), so
// every case folds the second operand into `first` in place with the
// two-address reg/imm/mem forms of and/or/xor.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // Guaranteed by LocationsBuilderX86_64::HandleBitwiseOperation.
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // 32-bit case: the second operand was allocated as Location::Any(), so it
    // may be a register, an int constant (always immediate-encodable), or a
    // stack slot; pick the matching addressing form.
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Stack-slot operand: fold directly from memory at RSP + offset.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // x86-64 immediates are at most 32 bits (sign-extended); wider constants
    // must be loaded from a RIP-relative 64-bit literal instead.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
7004
// Loads a heap reference at `out + offset` back into `out` ("one register"
// because the source base and the destination are the same register),
// honoring the requested read-barrier option:
//  - Baker read barrier: delegate to the fast-path field-load helper;
//  - non-Baker read barrier: plain load plus a slow-path barrier, which
//    needs the pre-load value of `out` preserved in `maybe_temp`;
//  - no read barrier: plain load, unpoisoned if heap poisoning is enabled.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check= */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
7037
// Loads the heap reference at `obj + offset` into `out` ("two registers"
// because the base object and the destination are distinct registers),
// honoring the requested read-barrier option. Unlike the one-register
// variant, `obj` survives the load, so the slow-path read barrier needs no
// extra temporary.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check= */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
7066
// Loads a GC root from `address` into `root`, with the requested read-barrier
// treatment. If `fixup_label` is non-null it is bound immediately after the
// load/lea so a linker patch can target that instruction (e.g. for
// PC-relative addresses).
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The 32-bit movl above is only correct if a compressed root has the
      // same size as a GcRoot and as an int32.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking= */ false);
      codegen_->AddSlowPath(slow_path);

      // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
      const int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
      __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip= */ true), Immediate(0));
      // The entrypoint is null when the GC is not marking.
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
7133
7134void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7135 Location ref,
7136 CpuRegister obj,
7137 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007138 bool needs_null_check) {
7139 DCHECK(kEmitCompilerReadBarrier);
7140 DCHECK(kUseBakerReadBarrier);
7141
7142 // /* HeapReference<Object> */ ref = *(obj + offset)
7143 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007144 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007145}
7146
7147void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7148 Location ref,
7149 CpuRegister obj,
7150 uint32_t data_offset,
7151 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007152 bool needs_null_check) {
7153 DCHECK(kEmitCompilerReadBarrier);
7154 DCHECK(kUseBakerReadBarrier);
7155
Roland Levillain3d312422016-06-23 13:53:42 +01007156 static_assert(
7157 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7158 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007159 // /* HeapReference<Object> */ ref =
7160 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007161 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007162 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007163}
7164
7165void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7166 Location ref,
7167 CpuRegister obj,
7168 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007169 bool needs_null_check,
7170 bool always_update_field,
7171 CpuRegister* temp1,
7172 CpuRegister* temp2) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007173 DCHECK(kEmitCompilerReadBarrier);
7174 DCHECK(kUseBakerReadBarrier);
7175
7176 // In slow path based read barriers, the read barrier call is
7177 // inserted after the original load. However, in fast path based
7178 // Baker's read barriers, we need to perform the load of
7179 // mirror::Object::monitor_ *before* the original reference load.
7180 // This load-load ordering is required by the read barrier.
7181 // The fast path/slow path (for Baker's algorithm) should look like:
7182 //
7183 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7184 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7185 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007186 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007187 // if (is_gray) {
7188 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7189 // }
7190 //
7191 // Note: the original implementation in ReadBarrier::Barrier is
7192 // slightly more complex as:
7193 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007194 // the high-bits of rb_state, which are expected to be all zeroes
7195 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
7196 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007197 // - it performs additional checks that we do not do here for
7198 // performance reasons.
7199
7200 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007201 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7202
Vladimir Marko953437b2016-08-24 08:30:46 +00007203 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01007204 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007205 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007206 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7207 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7208 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
7209
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007210 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007211 // ref = ReadBarrier::Mark(ref);
7212 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7213 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007214 if (needs_null_check) {
7215 MaybeRecordImplicitNullCheck(instruction);
7216 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007217
7218 // Load fence to prevent load-load reordering.
7219 // Note that this is a no-op, thanks to the x86-64 memory model.
7220 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7221
7222 // The actual reference load.
7223 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007224 __ movl(ref_reg, src); // Flags are unaffected.
7225
7226 // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
7227 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007228 SlowPathCode* slow_path;
7229 if (always_update_field) {
7230 DCHECK(temp1 != nullptr);
7231 DCHECK(temp2 != nullptr);
Vladimir Marko174b2e22017-10-12 13:34:49 +01007232 slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007233 instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp1, *temp2);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007234 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01007235 slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007236 instruction, ref, /* unpoison_ref_before_marking= */ true);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007237 }
Vladimir Marko953437b2016-08-24 08:30:46 +00007238 AddSlowPath(slow_path);
7239
7240 // We have done the "if" of the gray bit check above, now branch based on the flags.
7241 __ j(kNotZero, slow_path->GetEntryLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007242
7243 // Object* ref = ref_addr->AsMirrorPtr()
7244 __ MaybeUnpoisonHeapReference(ref_reg);
7245
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007246 __ Bind(slow_path->GetExitLabel());
7247}
7248
7249void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
7250 Location out,
7251 Location ref,
7252 Location obj,
7253 uint32_t offset,
7254 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007255 DCHECK(kEmitCompilerReadBarrier);
7256
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007257 // Insert a slow path based read barrier *after* the reference load.
7258 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007259 // If heap poisoning is enabled, the unpoisoning of the loaded
7260 // reference will be carried out by the runtime within the slow
7261 // path.
7262 //
7263 // Note that `ref` currently does not get unpoisoned (when heap
7264 // poisoning is enabled), which is alright as the `ref` argument is
7265 // not used by the artReadBarrierSlow entry point.
7266 //
7267 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007268 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00007269 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
7270 AddSlowPath(slow_path);
7271
Roland Levillain0d5a2812015-11-13 10:07:31 +00007272 __ jmp(slow_path->GetEntryLabel());
7273 __ Bind(slow_path->GetExitLabel());
7274}
7275
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007276void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7277 Location out,
7278 Location ref,
7279 Location obj,
7280 uint32_t offset,
7281 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007282 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007283 // Baker's read barriers shall be handled by the fast path
7284 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
7285 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007286 // If heap poisoning is enabled, unpoisoning will be taken care of
7287 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007288 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007289 } else if (kPoisonHeapReferences) {
7290 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
7291 }
7292}
7293
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007294void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7295 Location out,
7296 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007297 DCHECK(kEmitCompilerReadBarrier);
7298
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007299 // Insert a slow path based read barrier *after* the GC root load.
7300 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007301 // Note that GC roots are not affected by heap poisoning, so we do
7302 // not need to do anything special for this here.
7303 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007304 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007305 AddSlowPath(slow_path);
7306
Roland Levillain0d5a2812015-11-13 10:07:31 +00007307 __ jmp(slow_path->GetEntryLabel());
7308 __ Bind(slow_path->GetExitLabel());
7309}
7310
// HBoundType nodes are removed by the "prepare for register allocator" pass,
// so they must never reach code generation.
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
7315
// HBoundType nodes are removed by the "prepare for register allocator" pass,
// so they must never reach code generation.
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
7320
Mark Mendellfe57faa2015-09-18 09:26:15 -04007321// Simple implementation of packed switch - generate cascaded compare/jumps.
7322void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7323 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007324 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007325 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04007326 locations->AddTemp(Location::RequiresRegister());
7327 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04007328}
7329
// Emits a packed switch either as a cascade of compare/jump pairs (small
// switches) or as a jump table placed in the constant area (large switches).
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Non-zero bias: values below lower_bound go to the default block;
      // a signed compare is required from here on.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below. With a zero lower bound an
      // unsigned "below" compare also rejects negative values.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each iteration handles two cases with a single cmpl: a "<" branch for
    // case `index` and an "==" branch for case `index + 1`.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table path.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? A single unsigned compare also catches values
  // below the (now removed) bias, which wrapped around to large values.
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
7410
// HIntermediateAddress is not used on x86-64, so reaching it is a bug.
void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7415
// HIntermediateAddress is not used on x86-64, so reaching it is a bug.
void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7420
Aart Bikc5d47542016-01-27 17:00:35 -08007421void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
7422 if (value == 0) {
7423 __ xorl(dest, dest);
7424 } else {
7425 __ movl(dest, Immediate(value));
7426 }
7427}
7428
Mark Mendell92e83bf2015-05-07 11:25:03 -04007429void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
7430 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08007431 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007432 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00007433 } else if (IsUint<32>(value)) {
7434 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007435 __ movl(dest, Immediate(static_cast<int32_t>(value)));
7436 } else {
7437 __ movq(dest, Immediate(value));
7438 }
7439}
7440
Mark Mendell7c0b44f2016-02-01 10:08:35 -05007441void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
7442 if (value == 0) {
7443 __ xorps(dest, dest);
7444 } else {
7445 __ movss(dest, LiteralInt32Address(value));
7446 }
7447}
7448
7449void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
7450 if (value == 0) {
7451 __ xorpd(dest, dest);
7452 } else {
7453 __ movsd(dest, LiteralInt64Address(value));
7454 }
7455}
7456
7457void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
7458 Load32BitValue(dest, bit_cast<int32_t, float>(value));
7459}
7460
7461void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
7462 Load64BitValue(dest, bit_cast<int64_t, double>(value));
7463}
7464
Aart Bika19616e2016-02-01 18:57:58 -08007465void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
7466 if (value == 0) {
7467 __ testl(dest, dest);
7468 } else {
7469 __ cmpl(dest, Immediate(value));
7470 }
7471}
7472
7473void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
7474 if (IsInt<32>(value)) {
7475 if (value == 0) {
7476 __ testq(dest, dest);
7477 } else {
7478 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
7479 }
7480 } else {
7481 // Value won't fit in an int.
7482 __ cmpq(dest, LiteralInt64Address(value));
7483 }
7484}
7485
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007486void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
7487 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07007488 GenerateIntCompare(lhs_reg, rhs);
7489}
7490
7491void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007492 if (rhs.IsConstant()) {
7493 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007494 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007495 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007496 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007497 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007498 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007499 }
7500}
7501
7502void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
7503 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
7504 if (rhs.IsConstant()) {
7505 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
7506 Compare64BitValue(lhs_reg, value);
7507 } else if (rhs.IsDoubleStackSlot()) {
7508 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
7509 } else {
7510 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
7511 }
7512}
7513
7514Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
7515 Location index,
7516 ScaleFactor scale,
7517 uint32_t data_offset) {
7518 return index.IsConstant() ?
7519 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7520 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
7521}
7522
Mark Mendellcfa410b2015-05-25 16:02:44 -04007523void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
7524 DCHECK(dest.IsDoubleStackSlot());
7525 if (IsInt<32>(value)) {
7526 // Can move directly as an int32 constant.
7527 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
7528 Immediate(static_cast<int32_t>(value)));
7529 } else {
7530 Load64BitValue(CpuRegister(TMP), value);
7531 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
7532 }
7533}
7534
/**
 * Class to handle late fixup of offsets into constant area.
 *
 * The constant area is emitted after the method's code, so the final
 * RIP-relative displacement of a literal is only known at finalization
 * time; Process() patches it in then.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  // Allows subclasses (e.g. jump table fixups) to set the offset late,
  // once the target has actually been appended to the constant area.
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) override {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
7562
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  // The offset is a placeholder (-1) until CreateJumpTable() records the
  // table's real position in the constant area.
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Appends the jump table to the constant area and fills it with the
  // table-relative offsets of the switch's successor blocks.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      // All block labels must already be bound when the table is created.
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};
7599
// Finishes code generation: emits the constant area (literals and jump
// tables) after the code, then delegates to the base class.
void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables.
    // Note: this must happen before AddConstantArea(), as CreateJumpTable()
    // appends the table data to the constant area.
    for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}
7620
Mark Mendellf55c3e02015-03-26 21:07:46 -04007621Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007622 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddDouble(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007623 return Address::RIP(fixup);
7624}
7625
7626Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007627 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddFloat(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007628 return Address::RIP(fixup);
7629}
7630
7631Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007632 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt32(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007633 return Address::RIP(fixup);
7634}
7635
7636Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007637 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt64(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007638 return Address::RIP(fixup);
7639}
7640
Andreas Gampe85b62f22015-09-09 13:15:38 -07007641// TODO: trg as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007642void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07007643 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007644 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007645 return;
7646 }
7647
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007648 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007649
7650 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7651 if (trg.Equals(return_loc)) {
7652 return;
7653 }
7654
7655 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01007656 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07007657 parallel_move.AddMove(return_loc, trg, type, nullptr);
7658 GetMoveResolver()->EmitNativeCode(&parallel_move);
7659}
7660
Mark Mendell9c86b482015-09-18 13:36:07 -04007661Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7662 // Create a fixup to be used to create and address the jump table.
7663 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007664 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell9c86b482015-09-18 13:36:07 -04007665
7666 // We have to populate the jump tables.
7667 fixups_to_jump_tables_.push_back(table_fixup);
7668 return Address::RIP(table_fixup);
7669}
7670
// Stores a 64-bit immediate to memory. When the value fits a sign-extended
// 32-bit immediate, a single movq is used; otherwise two 32-bit stores.
// The implicit null check (if any) is recorded right after the first store,
// which is the access that can fault on a null `instruction` receiver.
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Didn't fit in a sign-extended 32-bit immediate. Do it in two pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
7688
// Patches one JIT root use: writes the address of entry `index_in_table` of
// the JIT roots table into the 32-bit literal slot recorded by `info.label`.
void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const PatchInfo<Label>& info,
                                          uint64_t index_in_table) const {
  // The label is bound after the literal; back up to the literal's offset.
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  // Address of the GcRoot slot in the roots table.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  // The patched slot need not be 4-byte aligned within the code stream.
  using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}
7700
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007701void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7702 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007703 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007704 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007705 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007706 }
7707
7708 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007709 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007710 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007711 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007712 }
7713}
7714
// Returns whether the target instruction set features advertise AVX support.
bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX();
}
7718
// Returns whether the target instruction set features advertise AVX2 support.
bool LocationsBuilderX86_64::CpuHasAvx2FeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX2();
}
7722
// Returns whether the target instruction set features advertise AVX support.
bool InstructionCodeGeneratorX86_64::CpuHasAvxFeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX();
}
7726
// Returns whether the target instruction set features advertise AVX2 support.
bool InstructionCodeGeneratorX86_64::CpuHasAvx2FeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX2();
}
7730
Roland Levillain4d027112015-07-01 15:41:14 +01007731#undef __
7732
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007733} // namespace x86_64
7734} // namespace art