/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data with a small num_entries.
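// For example, at the threshold of 5 entries the compare/jump sequence costs roughly
// 7.5 instructions, while a jump table costs 7 instructions plus 5 literals.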
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

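// Slow path throwing a null pointer exception through the pThrowNullPointer entry point.
// The path is fatal: execution never returns to the instruction that triggered the check.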
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

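// Slow path calling the pThrowDivZero entry point when the divisor of an integer
// division or remainder is zero. Like the null check slow path, it is fatal.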
class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

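// Slow path for integer/long division or remainder with a divisor of -1: the quotient is
// the negated dividend, and the remainder is zero. The long remainder is also cleared with
// xorl, since a 32-bit xor of a register with itself zeroes the full 64-bit register.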
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

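// Slow path calling the pTestSuspend entry point so the thread can be suspended; it then
// resumes at the return label or jumps directly to the successor block, if one is given.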
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

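// Slow path throwing an array index out of bounds exception through pThrowArrayBounds,
// moving the two inputs of the bounds check into the runtime calling convention. Fatal.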
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowArrayBounds),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

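// Slow path resolving a class through pInitializeType, or initializing it through
// pInitializeStaticStorage, used by both HLoadClass and HClinitCheck.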
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

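// Slow path resolving a string through the pResolveString entry point and moving the
// result from RAX into the expected output location.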
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

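// Slow path for instanceof (pInstanceofNonTrivial) and check-cast (pCheckCast). When the
// caller marks it fatal, no live registers are saved or restored and control never returns.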
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

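// Slow path calling the pDeoptimize entry point to abandon the compiled code for this frame.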
class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

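// Slow path for an object array store that needs a runtime check, performed by the
// pAputObject entry point; the three inputs are shuffled into the runtime calling
// convention with a parallel move.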
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCode(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT

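// Maps signed and unsigned integer conditions to x86_64 condition codes; the unsigned
// conditions (kCondB/BE/A/AE) use the below/above encodings.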
inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default:      break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
  type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
  __ Bind(&type_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
  for (const TypePatchInfo<Label>& info : type_patches_) {
    // These are always PC-relative, see GetSupportedLoadClassKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset,
                                                             &info.dex_file,
                                                             info.label.Position(),
                                                             info.type_index));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
  RecordPcInfo(instruction, dex_pc, slow_path);
}

static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      constant_area_start_(0),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}

InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

David Brazdil58282f42016-01-14 12:45:10 +00001051void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001052 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001053 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001054
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001055 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001056 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001057}
1058
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001059static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001060 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001061}
David Srbecky9d8606d2015-04-12 09:35:32 +01001062
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001063static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001064 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001065}
1066
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001067void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001068 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001069 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001070 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001071 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001072 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001073
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001074 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001075 __ testq(CpuRegister(RAX), Address(
1076 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001077 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001078 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001079
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001080 if (HasEmptyFrame()) {
1081 return;
1082 }
1083
Nicolas Geoffray98893962015-01-21 12:32:32 +00001084 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001085 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001086 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001087 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001088 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1089 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001090 }
1091 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001092
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001093 int adjust = GetFrameSize() - GetCoreSpillSize();
1094 __ subq(CpuRegister(RSP), Immediate(adjust));
1095 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001096 uint32_t xmm_spill_location = GetFpuSpillStart();
1097 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001098
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001099 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1100 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001101 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1102 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1103 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001104 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001105 }
1106
Mathieu Chartiere401d142015-04-22 13:56:20 -07001107 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001108 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001109}
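// Rough shape of the emitted prologue for a typical non-leaf method (illustrative;
// which registers get spilled and the frame size depend on register allocation):
//   1. testq of RAX against [RSP - GetStackOverflowReservedBytes(kX86_64)]: a read
//      whose only purpose is to fault on the guard page if the stack would
//      overflow, with a stack map recorded for it (RecordPcInfo);
//   2. one pushq per allocated core callee-save register;
//   3. subq of RSP by the remaining frame size;
//   4. movsd of each allocated XMM callee-save into its spill slot;
//   5. movq of the incoming ArtMethod* into the slot at the bottom of the frame.
// Methods with an empty frame stop after step 1.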
1110
1111void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001112 __ cfi().RememberState();
1113 if (!HasEmptyFrame()) {
1114 uint32_t xmm_spill_location = GetFpuSpillStart();
1115 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1116 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1117 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1118 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1119 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1120 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1121 }
1122 }
1123
1124 int adjust = GetFrameSize() - GetCoreSpillSize();
1125 __ addq(CpuRegister(RSP), Immediate(adjust));
1126 __ cfi().AdjustCFAOffset(-adjust);
1127
1128 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1129 Register reg = kCoreCalleeSaves[i];
1130 if (allocated_registers_.ContainsCoreRegister(reg)) {
1131 __ popq(CpuRegister(reg));
1132 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1133 __ cfi().Restore(DWARFReg(reg));
1134 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001135 }
1136 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001137 __ ret();
1138 __ cfi().RestoreState();
1139 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001140}
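// The epilogue mirrors the prologue in reverse: reload the callee-save XMMs, add the
// frame adjustment back to RSP, pop the callee-save core registers, then ret. The
// cfi().RememberState()/RestoreState() pair around it keeps the unwind info correct
// for any code of the same method emitted after the return sequence, since the CFA
// offset is reset to the full frame size afterwards (DefCFAOffset).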
1141
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001142void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1143 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001144}
1145
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001146void CodeGeneratorX86_64::Move(Location destination, Location source) {
1147 if (source.Equals(destination)) {
1148 return;
1149 }
1150 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001151 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001152 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001153 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001154 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001155 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001156 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001157 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1158 } else if (source.IsConstant()) {
1159 HConstant* constant = source.GetConstant();
1160 if (constant->IsLongConstant()) {
1161 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1162 } else {
1163 Load32BitValue(dest, GetInt32ValueOf(constant));
1164 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001165 } else {
1166 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001167 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001168 }
1169 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001170 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001171 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001172 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001173 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001174 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1175 } else if (source.IsConstant()) {
1176 HConstant* constant = source.GetConstant();
1177 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1178 if (constant->IsFloatConstant()) {
1179 Load32BitValue(dest, static_cast<int32_t>(value));
1180 } else {
1181 Load64BitValue(dest, value);
1182 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001183 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001184 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001185 } else {
1186 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001187 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001188 }
1189 } else if (destination.IsStackSlot()) {
1190 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001191 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001192 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001193 } else if (source.IsFpuRegister()) {
1194 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001195 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001196 } else if (source.IsConstant()) {
1197 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001198 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001199 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001200 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001201 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001202 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1203 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001204 }
1205 } else {
1206 DCHECK(destination.IsDoubleStackSlot());
1207 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001208 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001209 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001210 } else if (source.IsFpuRegister()) {
1211 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001212 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001213 } else if (source.IsConstant()) {
1214 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001215 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001216 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001217 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001218 } else {
1219 DCHECK(constant->IsLongConstant());
1220 value = constant->AsLongConstant()->GetValue();
1221 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001222 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001223 } else {
1224 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001225 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1226 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001227 }
1228 }
1229}
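// Move() covers every pairing of {core register, XMM register, stack slot, constant}
// source with a register or stack destination, choosing the width from the
// destination kind: 32-bit slots use movl/movss, 64-bit ones movq/movsd, and
// core<->XMM transfers go through movd. x86 has no memory-to-memory mov, so
// stack-to-stack moves bounce through the reserved TMP register.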
1230
Calin Juravle175dc732015-08-25 15:42:32 +01001231void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1232 DCHECK(location.IsRegister());
1233 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1234}
1235
Calin Juravlee460d1d2015-09-29 04:52:17 +01001236void CodeGeneratorX86_64::MoveLocation(
1237 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1238 Move(dst, src);
1239}
1240
1241void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1242 if (location.IsRegister()) {
1243 locations->AddTemp(location);
1244 } else {
1245 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1246 }
1247}
1248
David Brazdilfc6a86a2015-06-26 10:33:45 +00001249void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001250 DCHECK(!successor->IsExitBlock());
1251
1252 HBasicBlock* block = got->GetBlock();
1253 HInstruction* previous = got->GetPrevious();
1254
1255 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001256 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001257 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1258 return;
1259 }
1260
1261 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1262 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1263 }
1264 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001265 __ jmp(codegen_->GetLabelOf(successor));
1266 }
1267}
1268
David Brazdilfc6a86a2015-06-26 10:33:45 +00001269void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1270 got->SetLocations(nullptr);
1271}
1272
1273void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1274 HandleGoto(got, got->GetSuccessor());
1275}
1276
1277void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1278 try_boundary->SetLocations(nullptr);
1279}
1280
1281void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1282 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1283 if (!successor->IsExitBlock()) {
1284 HandleGoto(try_boundary, successor);
1285 }
1286}
1287
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001288void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1289 exit->SetLocations(nullptr);
1290}
1291
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001292void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001293}
1294
Mark Mendell152408f2015-12-31 12:28:50 -05001295template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001296void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001297 LabelType* true_label,
1298 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001299 if (cond->IsFPConditionTrueIfNaN()) {
1300 __ j(kUnordered, true_label);
1301 } else if (cond->IsFPConditionFalseIfNaN()) {
1302 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001303 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001304 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001305}
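// GenerateFPJumps assumes the caller has just emitted a ucomiss/ucomisd. Those
// instructions report "unordered" when either input is NaN, so the unordered case is
// dispatched first (to true_label or false_label, depending on whether the condition
// treats NaN as true or false), and only then is the jump on the ordinary
// floating-point condition emitted.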
1306
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001307void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001308 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001309
Mark Mendellc4701932015-04-10 13:18:51 -04001310 Location left = locations->InAt(0);
1311 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001312 Primitive::Type type = condition->InputAt(0)->GetType();
1313 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001314 case Primitive::kPrimBoolean:
1315 case Primitive::kPrimByte:
1316 case Primitive::kPrimChar:
1317 case Primitive::kPrimShort:
1318 case Primitive::kPrimInt:
1319 case Primitive::kPrimNot: {
1320 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1321 if (right.IsConstant()) {
1322 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1323 if (value == 0) {
1324 __ testl(left_reg, left_reg);
1325 } else {
1326 __ cmpl(left_reg, Immediate(value));
1327 }
1328 } else if (right.IsStackSlot()) {
1329 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1330 } else {
1331 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1332 }
1333 break;
1334 }
Mark Mendellc4701932015-04-10 13:18:51 -04001335 case Primitive::kPrimLong: {
1336 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1337 if (right.IsConstant()) {
1338 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001339 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001340 } else if (right.IsDoubleStackSlot()) {
1341 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1342 } else {
1343 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1344 }
Mark Mendellc4701932015-04-10 13:18:51 -04001345 break;
1346 }
1347 case Primitive::kPrimFloat: {
1348 if (right.IsFpuRegister()) {
1349 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1350 } else if (right.IsConstant()) {
1351 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1352 codegen_->LiteralFloatAddress(
1353 right.GetConstant()->AsFloatConstant()->GetValue()));
1354 } else {
1355 DCHECK(right.IsStackSlot());
1356 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1357 Address(CpuRegister(RSP), right.GetStackIndex()));
1358 }
Mark Mendellc4701932015-04-10 13:18:51 -04001359 break;
1360 }
1361 case Primitive::kPrimDouble: {
1362 if (right.IsFpuRegister()) {
1363 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1364 } else if (right.IsConstant()) {
1365 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1366 codegen_->LiteralDoubleAddress(
1367 right.GetConstant()->AsDoubleConstant()->GetValue()));
1368 } else {
1369 DCHECK(right.IsDoubleStackSlot());
1370 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1371 Address(CpuRegister(RSP), right.GetStackIndex()));
1372 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001373 break;
1374 }
1375 default:
1376 LOG(FATAL) << "Unexpected condition type " << type;
1377 }
1378}
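// A small peephole visible above: when the right-hand side is the constant 0, the
// integer case emits `testl reg, reg` instead of `cmpl reg, 0`; both set the same
// flags, but the test has a shorter encoding. The long case hands constants to
// codegen_->Compare64BitValue() (defined elsewhere in this file), which also has to
// cope with immediates that do not fit cmpq's 32-bit immediate form.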
1379
1380template<class LabelType>
1381void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1382 LabelType* true_target_in,
1383 LabelType* false_target_in) {
1384 // Generated branching requires both targets to be explicit. If either of the
1385 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1386 LabelType fallthrough_target;
1387 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1388 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1389
1390 // Generate the comparison to set the CC.
1391 GenerateCompareTest(condition);
1392
1393 // Now generate the correct jump(s).
1394 Primitive::Type type = condition->InputAt(0)->GetType();
1395 switch (type) {
1396 case Primitive::kPrimLong: {
1397 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1398 break;
1399 }
1400 case Primitive::kPrimFloat: {
1401 GenerateFPJumps(condition, true_target, false_target);
1402 break;
1403 }
1404 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001405 GenerateFPJumps(condition, true_target, false_target);
1406 break;
1407 }
1408 default:
1409 LOG(FATAL) << "Unexpected condition type " << type;
1410 }
1411
David Brazdil0debae72015-11-12 18:37:00 +00001412 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001413 __ jmp(false_target);
1414 }
David Brazdil0debae72015-11-12 18:37:00 +00001415
1416 if (fallthrough_target.IsLinked()) {
1417 __ Bind(&fallthrough_target);
1418 }
Mark Mendellc4701932015-04-10 13:18:51 -04001419}
1420
David Brazdil0debae72015-11-12 18:37:00 +00001421static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1422 // Moves may affect the eflags register (move zero uses xorl), so the eflags can
 1423 // only be relied on when `cond` is the instruction immediately before `branch`.
 1424 // FP conditions are also excluded: materializing them requires complex branching.
1425 return cond->IsCondition() &&
1426 cond->GetNext() == branch &&
1427 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1428}
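// Example of what this enables: if a materialized HCondition is immediately followed
// by the HIf that consumes it, the compare that fed the setcc is still the last
// flag-setting instruction, so GenerateTestAndBranch below can jump on those flags
// directly instead of re-testing the materialized boolean with testl/cmpl.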
1429
Mark Mendell152408f2015-12-31 12:28:50 -05001430template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001431void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001432 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001433 LabelType* true_target,
1434 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001435 HInstruction* cond = instruction->InputAt(condition_input_index);
1436
1437 if (true_target == nullptr && false_target == nullptr) {
1438 // Nothing to do. The code always falls through.
1439 return;
1440 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001441 // Constant condition, statically compared against "true" (integer value 1).
1442 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001443 if (true_target != nullptr) {
1444 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001445 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001446 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001447 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001448 if (false_target != nullptr) {
1449 __ jmp(false_target);
1450 }
1451 }
1452 return;
1453 }
1454
1455 // The following code generates these patterns:
1456 // (1) true_target == nullptr && false_target != nullptr
1457 // - opposite condition true => branch to false_target
1458 // (2) true_target != nullptr && false_target == nullptr
1459 // - condition true => branch to true_target
1460 // (3) true_target != nullptr && false_target != nullptr
1461 // - condition true => branch to true_target
1462 // - branch to false_target
1463 if (IsBooleanValueOrMaterializedCondition(cond)) {
1464 if (AreEflagsSetFrom(cond, instruction)) {
1465 if (true_target == nullptr) {
1466 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1467 } else {
1468 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1469 }
1470 } else {
1471 // Materialized condition, compare against 0.
1472 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1473 if (lhs.IsRegister()) {
1474 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1475 } else {
1476 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1477 }
1478 if (true_target == nullptr) {
1479 __ j(kEqual, false_target);
1480 } else {
1481 __ j(kNotEqual, true_target);
1482 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001483 }
1484 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001485 // Condition has not been materialized, use its inputs as the
1486 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001487 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001488
David Brazdil0debae72015-11-12 18:37:00 +00001489 // If this is a long or FP comparison that has been folded into
1490 // the HCondition, generate the comparison directly.
1491 Primitive::Type type = condition->InputAt(0)->GetType();
1492 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1493 GenerateCompareTestAndBranch(condition, true_target, false_target);
1494 return;
1495 }
1496
1497 Location lhs = condition->GetLocations()->InAt(0);
1498 Location rhs = condition->GetLocations()->InAt(1);
1499 if (rhs.IsRegister()) {
1500 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1501 } else if (rhs.IsConstant()) {
1502 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001503 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001504 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001505 __ cmpl(lhs.AsRegister<CpuRegister>(),
1506 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1507 }
1508 if (true_target == nullptr) {
1509 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1510 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001511 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001512 }
Dave Allison20dfc792014-06-16 20:44:29 -07001513 }
David Brazdil0debae72015-11-12 18:37:00 +00001514
1515 // If neither branch falls through (case 3), the conditional branch to `true_target`
1516 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1517 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001518 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001519 }
1520}
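// In terms of emitted branches, the three shapes listed in the comment above come
// out roughly as (jCC standing for the chosen condition code):
//   (1) only false_target:  jCC(opposite)  false_target
//   (2) only true_target:   jCC            true_target
//   (3) both targets:       jCC            true_target
//                           jmp            false_target
// Non-materialized long and floating-point conditions are routed through
// GenerateCompareTestAndBranch instead, where the comparison itself (cmpq or
// ucomiss/ucomisd) still has to be emitted.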
1521
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001522void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001523 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1524 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001525 locations->SetInAt(0, Location::Any());
1526 }
1527}
1528
1529void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001530 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1531 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1532 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1533 nullptr : codegen_->GetLabelOf(true_successor);
1534 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1535 nullptr : codegen_->GetLabelOf(false_successor);
1536 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001537}
1538
1539void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1540 LocationSummary* locations = new (GetGraph()->GetArena())
1541 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001542 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001543 locations->SetInAt(0, Location::Any());
1544 }
1545}
1546
1547void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001548 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001549 GenerateTestAndBranch<Label>(deoptimize,
1550 /* condition_input_index */ 0,
1551 slow_path->GetEntryLabel(),
1552 /* false_target */ nullptr);
1553}
1554
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001555static bool SelectCanUseCMOV(HSelect* select) {
1556 // There are no conditional move instructions for XMMs.
1557 if (Primitive::IsFloatingPointType(select->GetType())) {
1558 return false;
1559 }
1560
1561 // An FP condition doesn't generate the single CC that we need.
1562 HInstruction* condition = select->GetCondition();
1563 if (condition->IsCondition() &&
1564 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1565 return false;
1566 }
1567
1568 // We can generate a CMOV for this Select.
1569 return true;
1570}
1571
David Brazdil74eb1b22015-12-14 11:44:01 +00001572void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1573 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1574 if (Primitive::IsFloatingPointType(select->GetType())) {
1575 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001576 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001577 } else {
1578 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001579 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001580 if (select->InputAt(1)->IsConstant()) {
1581 locations->SetInAt(1, Location::RequiresRegister());
1582 } else {
1583 locations->SetInAt(1, Location::Any());
1584 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001585 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001586 locations->SetInAt(1, Location::Any());
1587 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001588 }
1589 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1590 locations->SetInAt(2, Location::RequiresRegister());
1591 }
1592 locations->SetOut(Location::SameAsFirstInput());
1593}
1594
1595void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1596 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001597 if (SelectCanUseCMOV(select)) {
1598 // If both the condition and the source types are integer, we can generate
1599 // a CMOV to implement Select.
1600 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001601 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001602 DCHECK(locations->InAt(0).Equals(locations->Out()));
1603
1604 HInstruction* select_condition = select->GetCondition();
1605 Condition cond = kNotEqual;
1606
1607 // Figure out how to test the 'condition'.
1608 if (select_condition->IsCondition()) {
1609 HCondition* condition = select_condition->AsCondition();
1610 if (!condition->IsEmittedAtUseSite()) {
1611 // This was a previously materialized condition.
1612 // Can we use the existing condition code?
1613 if (AreEflagsSetFrom(condition, select)) {
1614 // Materialization was the previous instruction. Condition codes are right.
1615 cond = X86_64IntegerCondition(condition->GetCondition());
1616 } else {
1617 // No, we have to recreate the condition code.
1618 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1619 __ testl(cond_reg, cond_reg);
1620 }
1621 } else {
1622 GenerateCompareTest(condition);
1623 cond = X86_64IntegerCondition(condition->GetCondition());
1624 }
1625 } else {
1626 // Must be a boolean condition, which needs to be compared to 0.
1627 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1628 __ testl(cond_reg, cond_reg);
1629 }
1630
1631 // If the condition is true, overwrite the output, which already contains false.
1632 // Generate the correctly sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001633 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1634 if (value_true_loc.IsRegister()) {
1635 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1636 } else {
1637 __ cmov(cond,
1638 value_false,
1639 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1640 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001641 } else {
1642 NearLabel false_target;
1643 GenerateTestAndBranch<NearLabel>(select,
1644 /* condition_input_index */ 2,
1645 /* true_target */ nullptr,
1646 &false_target);
1647 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1648 __ Bind(&false_target);
1649 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001650}
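// A sketch of the CMOV path for an integer select `out = cond ? tv : fv`
// (illustrative, Intel operand order): the output register is the same as the false
// input, so the code reduces to
//   testl cond_reg, cond_reg     (skipped when the eflags are still live from cond)
//   cmovne out, tv               (overwrite only when the condition holds;
//                                 cmovneq for long selects, and the true value may
//                                 instead come from a stack slot)
// with cmovne replaced by the condition's own code when the HCondition is emitted
// at its use site.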
1651
David Srbecky0cf44932015-12-09 14:09:59 +00001652void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1653 new (GetGraph()->GetArena()) LocationSummary(info);
1654}
1655
David Srbeckyd28f4a02016-03-14 17:14:24 +00001656void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1657 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001658}
1659
1660void CodeGeneratorX86_64::GenerateNop() {
1661 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001662}
1663
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001664void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001665 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001666 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001667 // Handle the long/FP comparisons made in instruction simplification.
1668 switch (cond->InputAt(0)->GetType()) {
1669 case Primitive::kPrimLong:
1670 locations->SetInAt(0, Location::RequiresRegister());
1671 locations->SetInAt(1, Location::Any());
1672 break;
1673 case Primitive::kPrimFloat:
1674 case Primitive::kPrimDouble:
1675 locations->SetInAt(0, Location::RequiresFpuRegister());
1676 locations->SetInAt(1, Location::Any());
1677 break;
1678 default:
1679 locations->SetInAt(0, Location::RequiresRegister());
1680 locations->SetInAt(1, Location::Any());
1681 break;
1682 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001683 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001684 locations->SetOut(Location::RequiresRegister());
1685 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001686}
1687
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001688void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001689 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001690 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001691 }
Mark Mendellc4701932015-04-10 13:18:51 -04001692
1693 LocationSummary* locations = cond->GetLocations();
1694 Location lhs = locations->InAt(0);
1695 Location rhs = locations->InAt(1);
1696 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001697 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001698
1699 switch (cond->InputAt(0)->GetType()) {
1700 default:
1701 // Integer case.
1702
1703 // Clear output register: setcc only sets the low byte.
1704 __ xorl(reg, reg);
1705
1706 if (rhs.IsRegister()) {
1707 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1708 } else if (rhs.IsConstant()) {
1709 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001710 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001711 } else {
1712 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1713 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001714 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001715 return;
1716 case Primitive::kPrimLong:
1717 // Clear output register: setcc only sets the low byte.
1718 __ xorl(reg, reg);
1719
1720 if (rhs.IsRegister()) {
1721 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1722 } else if (rhs.IsConstant()) {
1723 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001724 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001725 } else {
1726 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1727 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001728 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001729 return;
1730 case Primitive::kPrimFloat: {
1731 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1732 if (rhs.IsConstant()) {
1733 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1734 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1735 } else if (rhs.IsStackSlot()) {
1736 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1737 } else {
1738 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1739 }
1740 GenerateFPJumps(cond, &true_label, &false_label);
1741 break;
1742 }
1743 case Primitive::kPrimDouble: {
1744 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1745 if (rhs.IsConstant()) {
1746 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1747 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1748 } else if (rhs.IsDoubleStackSlot()) {
1749 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1750 } else {
1751 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1752 }
1753 GenerateFPJumps(cond, &true_label, &false_label);
1754 break;
1755 }
1756 }
1757
1758 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001759 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001760
Roland Levillain4fa13f62015-07-06 18:11:54 +01001761 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001762 __ Bind(&false_label);
1763 __ xorl(reg, reg);
1764 __ jmp(&done_label);
1765
Roland Levillain4fa13f62015-07-06 18:11:54 +01001766 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001767 __ Bind(&true_label);
1768 __ movl(reg, Immediate(1));
1769 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001770}
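// Materialization summary for the integer/long cases above: the output register is
// cleared with xorl before the compare (xorl clobbers the flags, and setcc only
// writes the low byte), then a single setcc captures the condition. Floating-point
// conditions cannot be captured by one setcc because the unordered (NaN) case needs
// its own branch, so they jump to local true/false labels and load 1 or 0 instead.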
1771
1772void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001773 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001774}
1775
1776void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001777 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001778}
1779
1780void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001781 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001782}
1783
1784void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001785 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001786}
1787
1788void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001789 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001790}
1791
1792void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001793 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001794}
1795
1796void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001797 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001798}
1799
1800void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001801 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001802}
1803
1804void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001805 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001806}
1807
1808void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001809 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001810}
1811
1812void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001813 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001814}
1815
1816void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001817 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001818}
1819
Aart Bike9f37602015-10-09 11:15:55 -07001820void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001821 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001822}
1823
1824void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001825 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001826}
1827
1828void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001829 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001830}
1831
1832void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001833 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001834}
1835
1836void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001837 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001838}
1839
1840void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001841 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001842}
1843
1844void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001845 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001846}
1847
1848void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001849 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001850}
1851
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001852void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001853 LocationSummary* locations =
1854 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001855 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001856 case Primitive::kPrimBoolean:
1857 case Primitive::kPrimByte:
1858 case Primitive::kPrimShort:
1859 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001860 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001861 case Primitive::kPrimLong: {
1862 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001863 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001864 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1865 break;
1866 }
1867 case Primitive::kPrimFloat:
1868 case Primitive::kPrimDouble: {
1869 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001870 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001871 locations->SetOut(Location::RequiresRegister());
1872 break;
1873 }
1874 default:
1875 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1876 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001877}
1878
1879void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001880 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001881 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001882 Location left = locations->InAt(0);
1883 Location right = locations->InAt(1);
1884
Mark Mendell0c9497d2015-08-21 09:30:05 -04001885 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001886 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001887 Condition less_cond = kLess;
1888
Calin Juravleddb7df22014-11-25 20:56:51 +00001889 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001890 case Primitive::kPrimBoolean:
1891 case Primitive::kPrimByte:
1892 case Primitive::kPrimShort:
1893 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001894 case Primitive::kPrimInt: {
1895 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1896 if (right.IsConstant()) {
1897 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1898 codegen_->Compare32BitValue(left_reg, value);
1899 } else if (right.IsStackSlot()) {
1900 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1901 } else {
1902 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1903 }
1904 break;
1905 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001906 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001907 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1908 if (right.IsConstant()) {
1909 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001910 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001911 } else if (right.IsDoubleStackSlot()) {
1912 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001913 } else {
1914 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1915 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001916 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001917 }
1918 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001919 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1920 if (right.IsConstant()) {
1921 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1922 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1923 } else if (right.IsStackSlot()) {
1924 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1925 } else {
1926 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1927 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001928 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001929 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001930 break;
1931 }
1932 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001933 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1934 if (right.IsConstant()) {
1935 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1936 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1937 } else if (right.IsDoubleStackSlot()) {
1938 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1939 } else {
1940 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1941 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001942 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001943 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001944 break;
1945 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001946 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001947 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001948 }
Aart Bika19616e2016-02-01 18:57:58 -08001949
Calin Juravleddb7df22014-11-25 20:56:51 +00001950 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001951 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001952 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001953
Calin Juravle91debbc2014-11-26 19:01:09 +00001954 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001955 __ movl(out, Immediate(1));
1956 __ jmp(&done);
1957
1958 __ Bind(&less);
1959 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001960
1961 __ Bind(&done);
1962}
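// The sequence above materializes the usual -1/0/+1 compare result, roughly:
//   <cmp or ucomis of lhs, rhs>
//   movl out, 0          (movl does not touch the flags, so it is safe here)
//   je   done            (equal: 0)
//   jl/jb less           (kLess for integer inputs, kBelow after ucomis*, since
//                         ucomiss/ucomisd report "less than" through CF)
//   movl out, 1          (greater)
//   jmp  done
// less:
//   movl out, -1
// done:
// For FP inputs, the unordered (NaN) case branches straight to `less` or `greater`
// according to the compare's gt/lt bias.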
1963
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001964void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001965 LocationSummary* locations =
1966 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001967 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001968}
1969
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001970void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001971 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001972}
1973
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001974void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1975 LocationSummary* locations =
1976 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1977 locations->SetOut(Location::ConstantLocation(constant));
1978}
1979
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001980void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001981 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001982}
1983
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001984void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001985 LocationSummary* locations =
1986 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001987 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001988}
1989
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001990void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001991 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001992}
1993
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001994void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
1995 LocationSummary* locations =
1996 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1997 locations->SetOut(Location::ConstantLocation(constant));
1998}
1999
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002000void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002001 // Will be generated at use site.
2002}
2003
2004void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2005 LocationSummary* locations =
2006 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2007 locations->SetOut(Location::ConstantLocation(constant));
2008}
2009
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002010void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
2011 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002012 // Will be generated at use site.
2013}
2014
Calin Juravle27df7582015-04-17 19:12:31 +01002015void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2016 memory_barrier->SetLocations(nullptr);
2017}
2018
2019void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002020 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002021}
2022
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002023void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2024 ret->SetLocations(nullptr);
2025}
2026
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002027void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002028 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002029}
2030
2031void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002032 LocationSummary* locations =
2033 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002034 switch (ret->InputAt(0)->GetType()) {
2035 case Primitive::kPrimBoolean:
2036 case Primitive::kPrimByte:
2037 case Primitive::kPrimChar:
2038 case Primitive::kPrimShort:
2039 case Primitive::kPrimInt:
2040 case Primitive::kPrimNot:
2041 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002042 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002043 break;
2044
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002045 case Primitive::kPrimFloat:
2046 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002047 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002048 break;
2049
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002050 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002051 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002052 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002053}
2054
2055void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2056 if (kIsDebugBuild) {
2057 switch (ret->InputAt(0)->GetType()) {
2058 case Primitive::kPrimBoolean:
2059 case Primitive::kPrimByte:
2060 case Primitive::kPrimChar:
2061 case Primitive::kPrimShort:
2062 case Primitive::kPrimInt:
2063 case Primitive::kPrimNot:
2064 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002065 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002066 break;
2067
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002068 case Primitive::kPrimFloat:
2069 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002070 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002071 XMM0);
2072 break;
2073
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002074 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002075 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002076 }
2077 }
2078 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002079}
2080
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002081Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2082 switch (type) {
2083 case Primitive::kPrimBoolean:
2084 case Primitive::kPrimByte:
2085 case Primitive::kPrimChar:
2086 case Primitive::kPrimShort:
2087 case Primitive::kPrimInt:
2088 case Primitive::kPrimNot:
2089 case Primitive::kPrimLong:
2090 return Location::RegisterLocation(RAX);
2091
2092 case Primitive::kPrimVoid:
2093 return Location::NoLocation();
2094
2095 case Primitive::kPrimDouble:
2096 case Primitive::kPrimFloat:
2097 return Location::FpuRegisterLocation(XMM0);
2098 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002099
2100 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002101}
2102
2103Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2104 return Location::RegisterLocation(kMethodRegisterArgument);
2105}
2106
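// Computes the location of the next argument under the dex calling convention.
// gp_index_ and float_index_ count core and floating-point register candidates
// independently, while stack_index_ advances by the number of stack slots the
// argument would occupy (two for longs and doubles), so arguments that overflow
// the registers land at the correct stack offset.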
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002107Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002108 switch (type) {
2109 case Primitive::kPrimBoolean:
2110 case Primitive::kPrimByte:
2111 case Primitive::kPrimChar:
2112 case Primitive::kPrimShort:
2113 case Primitive::kPrimInt:
2114 case Primitive::kPrimNot: {
2115 uint32_t index = gp_index_++;
2116 stack_index_++;
2117 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002118 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002119 } else {
2120 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2121 }
2122 }
2123
2124 case Primitive::kPrimLong: {
2125 uint32_t index = gp_index_;
2126 stack_index_ += 2;
2127 if (index < calling_convention.GetNumberOfRegisters()) {
2128 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002129 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002130 } else {
2131 gp_index_ += 2;
2132 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2133 }
2134 }
2135
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002136 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002137 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002138 stack_index_++;
2139 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002140 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002141 } else {
2142 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2143 }
2144 }
2145
2146 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002147 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002148 stack_index_ += 2;
2149 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002150 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002151 } else {
2152 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2153 }
2154 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002155
2156 case Primitive::kPrimVoid:
2157 LOG(FATAL) << "Unexpected parameter type " << type;
2158 break;
2159 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002160 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002161}
2162
Calin Juravle175dc732015-08-25 15:42:32 +01002163void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as a direct dex call,
  // except that instead of loading arg0/r0 with the target Method*, arg0/r0
  // carries the method_idx.
2167 HandleInvoke(invoke);
2168}
2169
2170void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2171 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2172}
2173
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002174void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002175 // Explicit clinit checks triggered by static invokes must have been pruned by
2176 // art::PrepareForRegisterAllocation.
2177 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002178
Mark Mendellfb8d2792015-03-31 22:16:59 -04002179 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002180 if (intrinsic.TryDispatch(invoke)) {
2181 return;
2182 }
2183
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002184 HandleInvoke(invoke);
2185}
2186
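// If the intrinsics locations builder recognized this invoke, its LocationSummary
// is marked as intrinsified; emit the intrinsic expansion and report that no
// regular call needs to be generated.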
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002187static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2188 if (invoke->GetLocations()->Intrinsified()) {
2189 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2190 intrinsic.Dispatch(invoke);
2191 return true;
2192 }
2193 return false;
2194}
2195
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002196void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002197 // Explicit clinit checks triggered by static invokes must have been pruned by
2198 // art::PrepareForRegisterAllocation.
2199 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002200
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002201 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2202 return;
2203 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002204
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002205 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002206 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002207 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002208 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002209}
2210
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002211void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002212 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002213 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002214}
2215
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002216void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002217 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002218 if (intrinsic.TryDispatch(invoke)) {
2219 return;
2220 }
2221
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002222 HandleInvoke(invoke);
2223}
2224
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002225void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002226 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2227 return;
2228 }
2229
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002230 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002231 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002232 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002233}
2234
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002235void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2236 HandleInvoke(invoke);
2237 // Add the hidden argument.
2238 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2239}
2240
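// Interface dispatch: load the receiver's class, follow its IMT pointer, and
// call through the IMT slot assigned to this interface method. RAX carries the
// dex method index as a hidden argument (used by the runtime, e.g. when an IMT
// slot is shared by conflicting methods).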
2241void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2242 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002243 LocationSummary* locations = invoke->GetLocations();
2244 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2245 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002246 Location receiver = locations->InAt(0);
2247 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2248
  // Set the hidden argument. This is safe to do here, as RAX
  // won't be modified thereafter, before the `call` instruction.
2251 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002252 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002253
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002254 if (receiver.IsStackSlot()) {
2255 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002256 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002257 __ movl(temp, Address(temp, class_offset));
2258 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002259 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002260 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002261 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002262 codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (though
  // the concurrent copying collector may not do so in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the offset of the IMT entry for the interface method being invoked.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002279 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002280 __ call(Address(temp,
2281 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002282
2283 DCHECK(!codegen_->IsLeafMethod());
2284 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2285}
2286
Roland Levillain88cb1752014-10-20 16:36:47 +01002287void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2288 LocationSummary* locations =
2289 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2290 switch (neg->GetResultType()) {
2291 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002292 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002293 locations->SetInAt(0, Location::RequiresRegister());
2294 locations->SetOut(Location::SameAsFirstInput());
2295 break;
2296
Roland Levillain88cb1752014-10-20 16:36:47 +01002297 case Primitive::kPrimFloat:
2298 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002299 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002300 locations->SetOut(Location::SameAsFirstInput());
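      // The temporary FP register below holds the sign-bit mask used to flip
      // the sign of the input (see VisitNeg).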
Roland Levillain5368c212014-11-27 15:03:41 +00002301 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002302 break;
2303
2304 default:
2305 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2306 }
2307}
2308
2309void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2310 LocationSummary* locations = neg->GetLocations();
2311 Location out = locations->Out();
2312 Location in = locations->InAt(0);
2313 switch (neg->GetResultType()) {
2314 case Primitive::kPrimInt:
2315 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002316 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002317 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002318 break;
2319
2320 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002321 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002322 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002323 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002324 break;
2325
Roland Levillain5368c212014-11-27 15:03:41 +00002326 case Primitive::kPrimFloat: {
2327 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002328 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002329 // Implement float negation with an exclusive or with value
2330 // 0x80000000 (mask for bit 31, representing the sign of a
2331 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002332 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002333 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002334 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002335 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002336
Roland Levillain5368c212014-11-27 15:03:41 +00002337 case Primitive::kPrimDouble: {
2338 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002339 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002340 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002341 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002342 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002343 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002344 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002345 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002346 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002347
2348 default:
2349 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2350 }
2351}
2352
Roland Levillaindff1f282014-11-05 14:15:05 +00002353void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2354 LocationSummary* locations =
2355 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2356 Primitive::Type result_type = conversion->GetResultType();
2357 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002358 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002359
  // The Java language does not allow treating boolean as an integral type, but
  // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002362
Roland Levillaindff1f282014-11-05 14:15:05 +00002363 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002364 case Primitive::kPrimByte:
2365 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002366 case Primitive::kPrimLong:
2367 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002368 case Primitive::kPrimBoolean:
2369 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002370 case Primitive::kPrimShort:
2371 case Primitive::kPrimInt:
2372 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002373 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002374 locations->SetInAt(0, Location::Any());
2375 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2376 break;
2377
2378 default:
2379 LOG(FATAL) << "Unexpected type conversion from " << input_type
2380 << " to " << result_type;
2381 }
2382 break;
2383
Roland Levillain01a8d712014-11-14 16:27:39 +00002384 case Primitive::kPrimShort:
2385 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002386 case Primitive::kPrimLong:
2387 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002388 case Primitive::kPrimBoolean:
2389 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002390 case Primitive::kPrimByte:
2391 case Primitive::kPrimInt:
2392 case Primitive::kPrimChar:
2393 // Processing a Dex `int-to-short' instruction.
2394 locations->SetInAt(0, Location::Any());
2395 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2396 break;
2397
2398 default:
2399 LOG(FATAL) << "Unexpected type conversion from " << input_type
2400 << " to " << result_type;
2401 }
2402 break;
2403
Roland Levillain946e1432014-11-11 17:35:19 +00002404 case Primitive::kPrimInt:
2405 switch (input_type) {
2406 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002407 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002408 locations->SetInAt(0, Location::Any());
2409 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2410 break;
2411
2412 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002413 // Processing a Dex `float-to-int' instruction.
2414 locations->SetInAt(0, Location::RequiresFpuRegister());
2415 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002416 break;
2417
Roland Levillain946e1432014-11-11 17:35:19 +00002418 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002419 // Processing a Dex `double-to-int' instruction.
2420 locations->SetInAt(0, Location::RequiresFpuRegister());
2421 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002422 break;
2423
2424 default:
2425 LOG(FATAL) << "Unexpected type conversion from " << input_type
2426 << " to " << result_type;
2427 }
2428 break;
2429
Roland Levillaindff1f282014-11-05 14:15:05 +00002430 case Primitive::kPrimLong:
2431 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002432 case Primitive::kPrimBoolean:
2433 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002434 case Primitive::kPrimByte:
2435 case Primitive::kPrimShort:
2436 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002437 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002438 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002439 // TODO: We would benefit from a (to-be-implemented)
2440 // Location::RegisterOrStackSlot requirement for this input.
2441 locations->SetInAt(0, Location::RequiresRegister());
2442 locations->SetOut(Location::RequiresRegister());
2443 break;
2444
2445 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002446 // Processing a Dex `float-to-long' instruction.
2447 locations->SetInAt(0, Location::RequiresFpuRegister());
2448 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002449 break;
2450
Roland Levillaindff1f282014-11-05 14:15:05 +00002451 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002452 // Processing a Dex `double-to-long' instruction.
2453 locations->SetInAt(0, Location::RequiresFpuRegister());
2454 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002455 break;
2456
2457 default:
2458 LOG(FATAL) << "Unexpected type conversion from " << input_type
2459 << " to " << result_type;
2460 }
2461 break;
2462
Roland Levillain981e4542014-11-14 11:47:14 +00002463 case Primitive::kPrimChar:
2464 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002465 case Primitive::kPrimLong:
2466 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002467 case Primitive::kPrimBoolean:
2468 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002469 case Primitive::kPrimByte:
2470 case Primitive::kPrimShort:
2471 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002472 // Processing a Dex `int-to-char' instruction.
2473 locations->SetInAt(0, Location::Any());
2474 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2475 break;
2476
2477 default:
2478 LOG(FATAL) << "Unexpected type conversion from " << input_type
2479 << " to " << result_type;
2480 }
2481 break;
2482
Roland Levillaindff1f282014-11-05 14:15:05 +00002483 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002484 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002485 case Primitive::kPrimBoolean:
2486 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002487 case Primitive::kPrimByte:
2488 case Primitive::kPrimShort:
2489 case Primitive::kPrimInt:
2490 case Primitive::kPrimChar:
2491 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002492 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002493 locations->SetOut(Location::RequiresFpuRegister());
2494 break;
2495
2496 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002497 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002498 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002499 locations->SetOut(Location::RequiresFpuRegister());
2500 break;
2501
Roland Levillaincff13742014-11-17 14:32:17 +00002502 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002503 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002504 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002505 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002506 break;
2507
2508 default:
2509 LOG(FATAL) << "Unexpected type conversion from " << input_type
2510 << " to " << result_type;
      }
2512 break;
2513
Roland Levillaindff1f282014-11-05 14:15:05 +00002514 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002515 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002516 case Primitive::kPrimBoolean:
2517 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002518 case Primitive::kPrimByte:
2519 case Primitive::kPrimShort:
2520 case Primitive::kPrimInt:
2521 case Primitive::kPrimChar:
2522 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002523 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002524 locations->SetOut(Location::RequiresFpuRegister());
2525 break;
2526
2527 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002528 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002529 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002530 locations->SetOut(Location::RequiresFpuRegister());
2531 break;
2532
Roland Levillaincff13742014-11-17 14:32:17 +00002533 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002534 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002535 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002536 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002537 break;
2538
2539 default:
2540 LOG(FATAL) << "Unexpected type conversion from " << input_type
2541 << " to " << result_type;
2542 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002543 break;
2544
2545 default:
2546 LOG(FATAL) << "Unexpected type conversion from " << input_type
2547 << " to " << result_type;
2548 }
2549}
2550
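// The conversions below mirror the constraints set up in the locations builder:
// inputs may arrive in a register, on the stack, or as a constant, and each
// case emits the matching addressing form (constants are materialized directly
// into the destination).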
2551void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2552 LocationSummary* locations = conversion->GetLocations();
2553 Location out = locations->Out();
2554 Location in = locations->InAt(0);
2555 Primitive::Type result_type = conversion->GetResultType();
2556 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002557 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002558 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002559 case Primitive::kPrimByte:
2560 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002561 case Primitive::kPrimLong:
2562 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002563 case Primitive::kPrimBoolean:
2564 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002565 case Primitive::kPrimShort:
2566 case Primitive::kPrimInt:
2567 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002568 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002569 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002570 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002571 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002572 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002573 Address(CpuRegister(RSP), in.GetStackIndex()));
2574 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002575 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002576 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002577 }
2578 break;
2579
2580 default:
2581 LOG(FATAL) << "Unexpected type conversion from " << input_type
2582 << " to " << result_type;
2583 }
2584 break;
2585
Roland Levillain01a8d712014-11-14 16:27:39 +00002586 case Primitive::kPrimShort:
2587 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002588 case Primitive::kPrimLong:
2589 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002590 case Primitive::kPrimBoolean:
2591 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002592 case Primitive::kPrimByte:
2593 case Primitive::kPrimInt:
2594 case Primitive::kPrimChar:
2595 // Processing a Dex `int-to-short' instruction.
2596 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002597 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002598 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002599 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002600 Address(CpuRegister(RSP), in.GetStackIndex()));
2601 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002602 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002603 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002604 }
2605 break;
2606
2607 default:
2608 LOG(FATAL) << "Unexpected type conversion from " << input_type
2609 << " to " << result_type;
2610 }
2611 break;
2612
Roland Levillain946e1432014-11-11 17:35:19 +00002613 case Primitive::kPrimInt:
2614 switch (input_type) {
2615 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002616 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002617 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002618 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002619 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002620 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002621 Address(CpuRegister(RSP), in.GetStackIndex()));
2622 } else {
2623 DCHECK(in.IsConstant());
2624 DCHECK(in.GetConstant()->IsLongConstant());
2625 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002626 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002627 }
2628 break;
2629
Roland Levillain3f8f9362014-12-02 17:45:01 +00002630 case Primitive::kPrimFloat: {
2631 // Processing a Dex `float-to-int' instruction.
2632 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2633 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002634 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002635
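          // Java requires NaN to convert to 0 and positive overflow to saturate
          // at Integer.MAX_VALUE, whereas cvttss2si returns the x86 "integer
          // indefinite" value 0x80000000 for both, so those inputs are filtered
          // first (negative overflow already truncates to 0x80000000, which is
          // the required Integer.MIN_VALUE). The double and long variants below
          // follow the same pattern.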
2636 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002637 // if input >= (float)INT_MAX goto done
2638 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002639 __ j(kAboveEqual, &done);
2640 // if input == NaN goto nan
2641 __ j(kUnordered, &nan);
2642 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002643 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002644 __ jmp(&done);
2645 __ Bind(&nan);
2646 // output = 0
2647 __ xorl(output, output);
2648 __ Bind(&done);
2649 break;
2650 }
2651
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002652 case Primitive::kPrimDouble: {
2653 // Processing a Dex `double-to-int' instruction.
2654 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2655 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002656 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002657
2658 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002659 // if input >= (double)INT_MAX goto done
2660 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002661 __ j(kAboveEqual, &done);
2662 // if input == NaN goto nan
2663 __ j(kUnordered, &nan);
2664 // output = double-to-int-truncate(input)
2665 __ cvttsd2si(output, input);
2666 __ jmp(&done);
2667 __ Bind(&nan);
2668 // output = 0
2669 __ xorl(output, output);
2670 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002671 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002672 }
Roland Levillain946e1432014-11-11 17:35:19 +00002673
2674 default:
2675 LOG(FATAL) << "Unexpected type conversion from " << input_type
2676 << " to " << result_type;
2677 }
2678 break;
2679
Roland Levillaindff1f282014-11-05 14:15:05 +00002680 case Primitive::kPrimLong:
      DCHECK(out.IsRegister());
      switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002683 case Primitive::kPrimBoolean:
2684 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002685 case Primitive::kPrimByte:
2686 case Primitive::kPrimShort:
2687 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002688 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002689 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002690 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002691 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002692 break;
2693
Roland Levillain624279f2014-12-04 11:54:28 +00002694 case Primitive::kPrimFloat: {
2695 // Processing a Dex `float-to-long' instruction.
2696 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2697 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002698 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002699
Mark Mendell92e83bf2015-05-07 11:25:03 -04002700 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002701 // if input >= (float)LONG_MAX goto done
2702 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002703 __ j(kAboveEqual, &done);
2704 // if input == NaN goto nan
2705 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002706 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002707 __ cvttss2si(output, input, true);
2708 __ jmp(&done);
2709 __ Bind(&nan);
2710 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002711 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002712 __ Bind(&done);
2713 break;
2714 }
2715
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002716 case Primitive::kPrimDouble: {
2717 // Processing a Dex `double-to-long' instruction.
2718 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2719 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002720 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002721
Mark Mendell92e83bf2015-05-07 11:25:03 -04002722 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002723 // if input >= (double)LONG_MAX goto done
2724 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002725 __ j(kAboveEqual, &done);
2726 // if input == NaN goto nan
2727 __ j(kUnordered, &nan);
2728 // output = double-to-long-truncate(input)
2729 __ cvttsd2si(output, input, true);
2730 __ jmp(&done);
2731 __ Bind(&nan);
2732 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002733 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002734 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002735 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002736 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002737
2738 default:
2739 LOG(FATAL) << "Unexpected type conversion from " << input_type
2740 << " to " << result_type;
2741 }
2742 break;
2743
Roland Levillain981e4542014-11-14 11:47:14 +00002744 case Primitive::kPrimChar:
2745 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002746 case Primitive::kPrimLong:
2747 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002748 case Primitive::kPrimBoolean:
2749 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002750 case Primitive::kPrimByte:
2751 case Primitive::kPrimShort:
2752 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002753 // Processing a Dex `int-to-char' instruction.
2754 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002755 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002756 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002757 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002758 Address(CpuRegister(RSP), in.GetStackIndex()));
2759 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002760 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002761 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002762 }
2763 break;
2764
2765 default:
2766 LOG(FATAL) << "Unexpected type conversion from " << input_type
2767 << " to " << result_type;
2768 }
2769 break;
2770
Roland Levillaindff1f282014-11-05 14:15:05 +00002771 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002772 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002773 case Primitive::kPrimBoolean:
2774 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002775 case Primitive::kPrimByte:
2776 case Primitive::kPrimShort:
2777 case Primitive::kPrimInt:
2778 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002779 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002780 if (in.IsRegister()) {
2781 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2782 } else if (in.IsConstant()) {
2783 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2784 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002785 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002786 } else {
2787 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2788 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2789 }
Roland Levillaincff13742014-11-17 14:32:17 +00002790 break;
2791
2792 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002793 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002794 if (in.IsRegister()) {
2795 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2796 } else if (in.IsConstant()) {
2797 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2798 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002799 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002800 } else {
2801 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2802 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2803 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002804 break;
2805
Roland Levillaincff13742014-11-17 14:32:17 +00002806 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002807 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002808 if (in.IsFpuRegister()) {
2809 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2810 } else if (in.IsConstant()) {
2811 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2812 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002813 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002814 } else {
2815 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2816 Address(CpuRegister(RSP), in.GetStackIndex()));
2817 }
Roland Levillaincff13742014-11-17 14:32:17 +00002818 break;
2819
2820 default:
2821 LOG(FATAL) << "Unexpected type conversion from " << input_type
2822 << " to " << result_type;
      }
2824 break;
2825
Roland Levillaindff1f282014-11-05 14:15:05 +00002826 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002827 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002828 case Primitive::kPrimBoolean:
2829 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002830 case Primitive::kPrimByte:
2831 case Primitive::kPrimShort:
2832 case Primitive::kPrimInt:
2833 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002834 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002835 if (in.IsRegister()) {
2836 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2837 } else if (in.IsConstant()) {
2838 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2839 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002840 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002841 } else {
2842 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2843 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2844 }
Roland Levillaincff13742014-11-17 14:32:17 +00002845 break;
2846
2847 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002848 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002849 if (in.IsRegister()) {
2850 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2851 } else if (in.IsConstant()) {
2852 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2853 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002854 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002855 } else {
2856 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2857 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2858 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002859 break;
2860
Roland Levillaincff13742014-11-17 14:32:17 +00002861 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002862 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002863 if (in.IsFpuRegister()) {
2864 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2865 } else if (in.IsConstant()) {
2866 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2867 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002868 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002869 } else {
2870 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2871 Address(CpuRegister(RSP), in.GetStackIndex()));
2872 }
Roland Levillaincff13742014-11-17 14:32:17 +00002873 break;
2874
2875 default:
2876 LOG(FATAL) << "Unexpected type conversion from " << input_type
2877 << " to " << result_type;
      }
Roland Levillaindff1f282014-11-05 14:15:05 +00002879 break;
2880
2881 default:
2882 LOG(FATAL) << "Unexpected type conversion from " << input_type
2883 << " to " << result_type;
2884 }
2885}
2886
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002887void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002888 LocationSummary* locations =
2889 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002890 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002891 case Primitive::kPrimInt: {
2892 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002893 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2894 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002895 break;
2896 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002897
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002898 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002899 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002900 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002901 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002902 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002903 break;
2904 }
2905
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002906 case Primitive::kPrimDouble:
2907 case Primitive::kPrimFloat: {
2908 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002909 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002910 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002911 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002912 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002913
2914 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002915 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002916 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002917}
2918
2919void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2920 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002921 Location first = locations->InAt(0);
2922 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002923 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002924
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002925 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002926 case Primitive::kPrimInt: {
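      // When the output register differs from both inputs, leal acts as a
      // non-destructive three-operand add; otherwise fold the operation into a
      // two-operand addl on whichever input already sits in the output register.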
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002927 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002928 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2929 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002930 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2931 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002932 } else {
2933 __ leal(out.AsRegister<CpuRegister>(), Address(
2934 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2935 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002936 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002937 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2938 __ addl(out.AsRegister<CpuRegister>(),
2939 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2940 } else {
2941 __ leal(out.AsRegister<CpuRegister>(), Address(
2942 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2943 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002944 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002945 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002946 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002947 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002948 break;
2949 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002950
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002951 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002952 if (second.IsRegister()) {
2953 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2954 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002955 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2956 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002957 } else {
2958 __ leaq(out.AsRegister<CpuRegister>(), Address(
2959 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2960 }
2961 } else {
2962 DCHECK(second.IsConstant());
2963 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2964 int32_t int32_value = Low32Bits(value);
2965 DCHECK_EQ(int32_value, value);
2966 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2967 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2968 } else {
2969 __ leaq(out.AsRegister<CpuRegister>(), Address(
2970 first.AsRegister<CpuRegister>(), int32_value));
2971 }
2972 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002973 break;
2974 }
2975
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002976 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002977 if (second.IsFpuRegister()) {
2978 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2979 } else if (second.IsConstant()) {
2980 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002981 codegen_->LiteralFloatAddress(
2982 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002983 } else {
2984 DCHECK(second.IsStackSlot());
2985 __ addss(first.AsFpuRegister<XmmRegister>(),
2986 Address(CpuRegister(RSP), second.GetStackIndex()));
2987 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002988 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002989 }
2990
2991 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002992 if (second.IsFpuRegister()) {
2993 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2994 } else if (second.IsConstant()) {
2995 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002996 codegen_->LiteralDoubleAddress(
2997 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002998 } else {
2999 DCHECK(second.IsDoubleStackSlot());
3000 __ addsd(first.AsFpuRegister<XmmRegister>(),
3001 Address(CpuRegister(RSP), second.GetStackIndex()));
3002 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003003 break;
3004 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003005
3006 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003007 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003008 }
3009}
3010
3011void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003012 LocationSummary* locations =
3013 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003014 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003015 case Primitive::kPrimInt: {
3016 locations->SetInAt(0, Location::RequiresRegister());
3017 locations->SetInAt(1, Location::Any());
3018 locations->SetOut(Location::SameAsFirstInput());
3019 break;
3020 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003021 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003022 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003023 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003024 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003025 break;
3026 }
Calin Juravle11351682014-10-23 15:38:15 +01003027 case Primitive::kPrimFloat:
3028 case Primitive::kPrimDouble: {
3029 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003030 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003031 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003032 break;
Calin Juravle11351682014-10-23 15:38:15 +01003033 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003034 default:
Calin Juravle11351682014-10-23 15:38:15 +01003035 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003036 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003037}
3038
3039void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3040 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003041 Location first = locations->InAt(0);
3042 Location second = locations->InAt(1);
3043 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003044 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003045 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003046 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003047 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003048 } else if (second.IsConstant()) {
3049 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003050 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003051 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003052 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003053 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003054 break;
3055 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003056 case Primitive::kPrimLong: {
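      // The locations builder constrained long constants to the int32 range
      // (RegisterOrInt32Constant), since subq only accepts a sign-extended imm32.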
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003057 if (second.IsConstant()) {
3058 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3059 DCHECK(IsInt<32>(value));
3060 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3061 } else {
3062 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3063 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003064 break;
3065 }
3066
Calin Juravle11351682014-10-23 15:38:15 +01003067 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003068 if (second.IsFpuRegister()) {
3069 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3070 } else if (second.IsConstant()) {
3071 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003072 codegen_->LiteralFloatAddress(
3073 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003074 } else {
3075 DCHECK(second.IsStackSlot());
3076 __ subss(first.AsFpuRegister<XmmRegister>(),
3077 Address(CpuRegister(RSP), second.GetStackIndex()));
3078 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003079 break;
Calin Juravle11351682014-10-23 15:38:15 +01003080 }
3081
3082 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003083 if (second.IsFpuRegister()) {
3084 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3085 } else if (second.IsConstant()) {
3086 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003087 codegen_->LiteralDoubleAddress(
3088 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003089 } else {
3090 DCHECK(second.IsDoubleStackSlot());
3091 __ subsd(first.AsFpuRegister<XmmRegister>(),
3092 Address(CpuRegister(RSP), second.GetStackIndex()));
3093 }
Calin Juravle11351682014-10-23 15:38:15 +01003094 break;
3095 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003096
3097 default:
Calin Juravle11351682014-10-23 15:38:15 +01003098 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003099 }
3100}
3101
Calin Juravle34bacdf2014-10-07 20:23:36 +01003102void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3103 LocationSummary* locations =
3104 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3105 switch (mul->GetResultType()) {
3106 case Primitive::kPrimInt: {
3107 locations->SetInAt(0, Location::RequiresRegister());
3108 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003109 if (mul->InputAt(1)->IsIntConstant()) {
3110 // Can use 3 operand multiply.
3111 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3112 } else {
3113 locations->SetOut(Location::SameAsFirstInput());
3114 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003115 break;
3116 }
3117 case Primitive::kPrimLong: {
3118 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003119 locations->SetInAt(1, Location::Any());
3120 if (mul->InputAt(1)->IsLongConstant() &&
3121 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003122 // Can use 3 operand multiply.
3123 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3124 } else {
3125 locations->SetOut(Location::SameAsFirstInput());
3126 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003127 break;
3128 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003129 case Primitive::kPrimFloat:
3130 case Primitive::kPrimDouble: {
3131 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003132 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003133 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003134 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003135 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003136
3137 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003138 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003139 }
3140}
3141
3142void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3143 LocationSummary* locations = mul->GetLocations();
3144 Location first = locations->InAt(0);
3145 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003146 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003147 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003148 case Primitive::kPrimInt:
3149 // The constant may have ended up in a register, so test explicitly to avoid
3150 // problems where the output may not be the same as the first operand.
3151 if (mul->InputAt(1)->IsIntConstant()) {
3152 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3153 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3154 } else if (second.IsRegister()) {
3155 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003156 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003157 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003158 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003159 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003160 __ imull(first.AsRegister<CpuRegister>(),
3161 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003162 }
3163 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003164 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003165 // The constant may have ended up in a register, so test explicitly to avoid
3166 // problems where the output may not be the same as the first operand.
3167 if (mul->InputAt(1)->IsLongConstant()) {
3168 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3169 if (IsInt<32>(value)) {
3170 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3171 Immediate(static_cast<int32_t>(value)));
3172 } else {
3173 // Have to use the constant area.
3174 DCHECK(first.Equals(out));
3175 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3176 }
3177 } else if (second.IsRegister()) {
3178 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003179 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003180 } else {
3181 DCHECK(second.IsDoubleStackSlot());
3182 DCHECK(first.Equals(out));
3183 __ imulq(first.AsRegister<CpuRegister>(),
3184 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003185 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003186 break;
3187 }
3188
Calin Juravleb5bfa962014-10-21 18:02:24 +01003189 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003190 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003191 if (second.IsFpuRegister()) {
3192 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3193 } else if (second.IsConstant()) {
3194 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003195 codegen_->LiteralFloatAddress(
3196 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003197 } else {
3198 DCHECK(second.IsStackSlot());
3199 __ mulss(first.AsFpuRegister<XmmRegister>(),
3200 Address(CpuRegister(RSP), second.GetStackIndex()));
3201 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003202 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003203 }
3204
3205 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003206 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003207 if (second.IsFpuRegister()) {
3208 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3209 } else if (second.IsConstant()) {
3210 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003211 codegen_->LiteralDoubleAddress(
3212 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003213 } else {
3214 DCHECK(second.IsDoubleStackSlot());
3215 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3216 Address(CpuRegister(RSP), second.GetStackIndex()));
3217 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003218 break;
3219 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003220
3221 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003222 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003223 }
3224}
3225
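// Design note on the multiplies above: when the right-hand side is a constant that fits in an
// imm32, the three-operand form of imul lets the result land in a register other than the first
// input, which is why the locations builder only forces out == first input for the
// register/memory forms. A minimal sketch of the constant case (names follow the code above):
//
//   __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), Immediate(value));
//   __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
//            Immediate(static_cast<int32_t>(value)));  // the 64-bit form still only takes an imm32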
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003226void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3227 uint32_t stack_adjustment, bool is_float) {
3228 if (source.IsStackSlot()) {
3229 DCHECK(is_float);
3230 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3231 } else if (source.IsDoubleStackSlot()) {
3232 DCHECK(!is_float);
3233 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3234 } else {
3235 // Write the value to the temporary location on the stack and load to FP stack.
3236 if (is_float) {
3237 Location stack_temp = Location::StackSlot(temp_offset);
3238 codegen_->Move(stack_temp, source);
3239 __ flds(Address(CpuRegister(RSP), temp_offset));
3240 } else {
3241 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3242 codegen_->Move(stack_temp, source);
3243 __ fldl(Address(CpuRegister(RSP), temp_offset));
3244 }
3245 }
3246}
3247
3248void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3249 Primitive::Type type = rem->GetResultType();
3250 bool is_float = type == Primitive::kPrimFloat;
3251 size_t elem_size = Primitive::ComponentSize(type);
3252 LocationSummary* locations = rem->GetLocations();
3253 Location first = locations->InAt(0);
3254 Location second = locations->InAt(1);
3255 Location out = locations->Out();
3256
3257 // Create stack space for 2 elements.
3258 // TODO: enhance register allocator to ask for stack temporaries.
3259 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3260
3261 // Load the values to the FP stack in reverse order, using temporaries if needed.
3262 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3263 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3264
3265 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003266 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003267 __ Bind(&retry);
3268 __ fprem();
3269
3270 // Move FP status to AX.
3271 __ fstsw();
3272
3273 // And see if the argument reduction is complete. This is signaled by the
3274 // C2 FPU flag bit set to 0.
3275 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3276 __ j(kNotEqual, &retry);
3277
3278 // We have settled on the final value. Retrieve it into an XMM register.
3279 // Store FP top of stack to real stack.
3280 if (is_float) {
3281 __ fsts(Address(CpuRegister(RSP), 0));
3282 } else {
3283 __ fstl(Address(CpuRegister(RSP), 0));
3284 }
3285
3286 // Pop the 2 items from the FP stack.
3287 __ fucompp();
3288
3289 // Load the value from the stack into an XMM register.
3290 DCHECK(out.IsFpuRegister()) << out;
3291 if (is_float) {
3292 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3293 } else {
3294 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3295 }
3296
3297 // And remove the temporary stack space we allocated.
3298 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3299}
3300
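// A minimal C++ sketch of what the x87 loop above produces, assuming std::fmod semantics
// (remainder truncated toward zero, sign of the dividend). fprem only reduces the exponent
// difference by up to 63 bits per iteration, hence the retry loop until the FPU clears C2:
//
//   #include <cmath>
//   double RemFPSketch(double dividend, double divisor) {
//     return std::fmod(dividend, divisor);
//   }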
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003301void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3302 DCHECK(instruction->IsDiv() || instruction->IsRem());
3303
3304 LocationSummary* locations = instruction->GetLocations();
3305 Location second = locations->InAt(1);
3306 DCHECK(second.IsConstant());
3307
3308 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3309 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003310 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003311
3312 DCHECK(imm == 1 || imm == -1);
3313
3314 switch (instruction->GetResultType()) {
3315 case Primitive::kPrimInt: {
3316 if (instruction->IsRem()) {
3317 __ xorl(output_register, output_register);
3318 } else {
3319 __ movl(output_register, input_register);
3320 if (imm == -1) {
3321 __ negl(output_register);
3322 }
3323 }
3324 break;
3325 }
3326
3327 case Primitive::kPrimLong: {
3328 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003329 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003330 } else {
3331 __ movq(output_register, input_register);
3332 if (imm == -1) {
3333 __ negq(output_register);
3334 }
3335 }
3336 break;
3337 }
3338
3339 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003340 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003341 }
3342}
3343
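// Scalar sketch of the special case above, assuming the negation wraps the way negl/negq do
// (so INT_MIN / -1 stays INT_MIN and the remainder is always zero):
//
//   int64_t DivByOneOrMinusOneSketch(int64_t n, int64_t imm) { return (imm == 1) ? n : -n; }
//   int64_t RemByOneOrMinusOneSketch(int64_t /* n */, int64_t /* imm */) { return 0; }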
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003344void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003345 LocationSummary* locations = instruction->GetLocations();
3346 Location second = locations->InAt(1);
3347
3348 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3349 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3350
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003351 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003352 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3353 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003354
3355 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3356
3357 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003358 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003359 __ testl(numerator, numerator);
3360 __ cmov(kGreaterEqual, tmp, numerator);
3361 int shift = CTZ(imm);
3362 __ sarl(tmp, Immediate(shift));
3363
3364 if (imm < 0) {
3365 __ negl(tmp);
3366 }
3367
3368 __ movl(output_register, tmp);
3369 } else {
3370 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3371 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3372
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003373 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003374 __ addq(rdx, numerator);
3375 __ testq(numerator, numerator);
3376 __ cmov(kGreaterEqual, rdx, numerator);
3377 int shift = CTZ(imm);
3378 __ sarq(rdx, Immediate(shift));
3379
3380 if (imm < 0) {
3381 __ negq(rdx);
3382 }
3383
3384 __ movq(output_register, rdx);
3385 }
3386}
3387
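// The lea/cmov/sar sequence above implements round-toward-zero division by a power of two.
// A 32-bit C++ sketch, assuming two's complement and an arithmetic right shift for signed
// values (divisor d == +/-2^k, k == CTZ(|d|)):
//
//   int32_t DivByPowerOfTwoSketch(int32_t n, int32_t d) {
//     uint32_t abs_d = (d < 0) ? -static_cast<uint32_t>(d) : static_cast<uint32_t>(d);
//     int32_t biased = (n < 0) ? n + static_cast<int32_t>(abs_d - 1) : n;  // the lea + cmov
//     int32_t q = biased >> CTZ(abs_d);                                    // the sar
//     return (d < 0) ? -q : q;                                             // the neg
//   }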
3388void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3389 DCHECK(instruction->IsDiv() || instruction->IsRem());
3390
3391 LocationSummary* locations = instruction->GetLocations();
3392 Location second = locations->InAt(1);
3393
3394 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3395 : locations->GetTemp(0).AsRegister<CpuRegister>();
3396 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3397 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3398 : locations->Out().AsRegister<CpuRegister>();
3399 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3400
3401 DCHECK_EQ(RAX, eax.AsRegister());
3402 DCHECK_EQ(RDX, edx.AsRegister());
3403 if (instruction->IsDiv()) {
3404 DCHECK_EQ(RAX, out.AsRegister());
3405 } else {
3406 DCHECK_EQ(RDX, out.AsRegister());
3407 }
3408
3409 int64_t magic;
3410 int shift;
3411
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003412 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003413 if (instruction->GetResultType() == Primitive::kPrimInt) {
3414 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3415
3416 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3417
3418 __ movl(numerator, eax);
3419
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003420 __ movl(eax, Immediate(magic));
3421 __ imull(numerator);
3422
3423 if (imm > 0 && magic < 0) {
3424 __ addl(edx, numerator);
3425 } else if (imm < 0 && magic > 0) {
3426 __ subl(edx, numerator);
3427 }
3428
3429 if (shift != 0) {
3430 __ sarl(edx, Immediate(shift));
3431 }
3432
3433 __ movl(eax, edx);
3434 __ shrl(edx, Immediate(31));
3435 __ addl(edx, eax);
3436
3437 if (instruction->IsRem()) {
3438 __ movl(eax, numerator);
3439 __ imull(edx, Immediate(imm));
3440 __ subl(eax, edx);
3441 __ movl(edx, eax);
3442 } else {
3443 __ movl(eax, edx);
3444 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003445 } else {
3446 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3447
3448 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3449
3450 CpuRegister rax = eax;
3451 CpuRegister rdx = edx;
3452
3453 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3454
3455 // Save the numerator.
3456 __ movq(numerator, rax);
3457
3458 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003459 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003460
3461 // RDX:RAX = magic * numerator
3462 __ imulq(numerator);
3463
3464 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003465 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003466 __ addq(rdx, numerator);
3467 } else if (imm < 0 && magic > 0) {
3468 // RDX -= numerator
3469 __ subq(rdx, numerator);
3470 }
3471
3472 // Shift if needed.
3473 if (shift != 0) {
3474 __ sarq(rdx, Immediate(shift));
3475 }
3476
3477 // RDX += 1 if RDX < 0
3478 __ movq(rax, rdx);
3479 __ shrq(rdx, Immediate(63));
3480 __ addq(rdx, rax);
3481
3482 if (instruction->IsRem()) {
3483 __ movq(rax, numerator);
3484
3485 if (IsInt<32>(imm)) {
3486 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3487 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003488 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003489 }
3490
3491 __ subq(rax, rdx);
3492 __ movq(rdx, rax);
3493 } else {
3494 __ movq(rax, rdx);
3495 }
3496 }
3497}
3498
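// A 32-bit sketch of the magic-number division emitted above, assuming `magic` and `shift`
// are the values CalculateMagicAndShiftForDivRem() returns for the constant divisor `d`:
//
//   int32_t q = static_cast<int32_t>((static_cast<int64_t>(n) * magic) >> 32);  // imull, high half
//   if (d > 0 && magic < 0) q += n;                                             // addl(edx, numerator)
//   if (d < 0 && magic > 0) q -= n;                                             // subl(edx, numerator)
//   q >>= shift;                                                                // sarl(edx, shift)
//   q += static_cast<int32_t>(static_cast<uint32_t>(q) >> 31);                  // +1 if negative
//   int32_t r = n - q * d;                                                      // remainder path only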
Calin Juravlebacfec32014-11-14 15:54:36 +00003499void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3500 DCHECK(instruction->IsDiv() || instruction->IsRem());
3501 Primitive::Type type = instruction->GetResultType();
3502  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3503
3504 bool is_div = instruction->IsDiv();
3505 LocationSummary* locations = instruction->GetLocations();
3506
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003507 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3508 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003509
Roland Levillain271ab9c2014-11-27 15:23:57 +00003510 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003511 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003512
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003513 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003514 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003515
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003516 if (imm == 0) {
3517      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3518 } else if (imm == 1 || imm == -1) {
3519 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003520 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003521 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003522 } else {
3523 DCHECK(imm <= -2 || imm >= 2);
3524 GenerateDivRemWithAnyConstant(instruction);
3525 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003526 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003527 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003528 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003529 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003530 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003531
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003532 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3533 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3534    // Dividing by -1 is actually negation, and -0x80000000(00000000) == 0x80000000(00000000),
3535    // so it's safe to just use negl/negq instead of more complex comparisons.
3536 if (type == Primitive::kPrimInt) {
3537 __ cmpl(second_reg, Immediate(-1));
3538 __ j(kEqual, slow_path->GetEntryLabel());
3539      // edx:eax <- sign extension of eax
3540 __ cdq();
3541 // eax = quotient, edx = remainder
3542 __ idivl(second_reg);
3543 } else {
3544 __ cmpq(second_reg, Immediate(-1));
3545 __ j(kEqual, slow_path->GetEntryLabel());
3546      // rdx:rax <- sign extension of rax
3547 __ cqo();
3548 // rax = quotient, rdx = remainder
3549 __ idivq(second_reg);
3550 }
3551 __ Bind(slow_path->GetExitLabel());
3552 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003553}
3554
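// Why the explicit cmp/j(kEqual) on -1 above: idiv raises #DE both on a zero divisor and on
// quotient overflow, and INT_MIN / -1 (or INT64_MIN / -1) overflows. A sketch of the behaviour
// the slow path preserves, assuming two's-complement wrap-around for the negation as negl/negq
// provide:
//
//   int32_t SafeDivSketch(int32_t n, int32_t d) {  // d == 0 already excluded by HDivZeroCheck
//     if (d == -1) return -n;                      // INT_MIN stays INT_MIN, no fault
//     return n / d;
//   }
//   int32_t SafeRemSketch(int32_t n, int32_t d) {
//     if (d == -1) return 0;                       // INT_MIN % -1 is defined as 0
//     return n % d;
//   }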
Calin Juravle7c4954d2014-10-28 16:57:40 +00003555void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3556 LocationSummary* locations =
3557 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3558 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003559 case Primitive::kPrimInt:
3560 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003561 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003562 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003563 locations->SetOut(Location::SameAsFirstInput());
3564      // Intel uses edx:eax (rdx:rax for longs) as the dividend.
3565 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003566      // We need to save the numerator while we tweak RAX and RDX. Since imul forces its results
3567      // into RAX and RDX, it is simpler to keep RDX as a temporary and to request another temp
3568      // register to hold the numerator when the divisor is a constant.
3569 if (div->InputAt(1)->IsConstant()) {
3570 locations->AddTemp(Location::RequiresRegister());
3571 }
Calin Juravled0d48522014-11-04 16:40:20 +00003572 break;
3573 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003574
Calin Juravle7c4954d2014-10-28 16:57:40 +00003575 case Primitive::kPrimFloat:
3576 case Primitive::kPrimDouble: {
3577 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003578 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003579 locations->SetOut(Location::SameAsFirstInput());
3580 break;
3581 }
3582
3583 default:
3584 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3585 }
3586}
3587
3588void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3589 LocationSummary* locations = div->GetLocations();
3590 Location first = locations->InAt(0);
3591 Location second = locations->InAt(1);
3592 DCHECK(first.Equals(locations->Out()));
3593
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003594 Primitive::Type type = div->GetResultType();
3595 switch (type) {
3596 case Primitive::kPrimInt:
3597 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003598 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003599 break;
3600 }
3601
Calin Juravle7c4954d2014-10-28 16:57:40 +00003602 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003603 if (second.IsFpuRegister()) {
3604 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3605 } else if (second.IsConstant()) {
3606 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003607 codegen_->LiteralFloatAddress(
3608 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003609 } else {
3610 DCHECK(second.IsStackSlot());
3611 __ divss(first.AsFpuRegister<XmmRegister>(),
3612 Address(CpuRegister(RSP), second.GetStackIndex()));
3613 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003614 break;
3615 }
3616
3617 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003618 if (second.IsFpuRegister()) {
3619 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3620 } else if (second.IsConstant()) {
3621 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003622 codegen_->LiteralDoubleAddress(
3623 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003624 } else {
3625 DCHECK(second.IsDoubleStackSlot());
3626 __ divsd(first.AsFpuRegister<XmmRegister>(),
3627 Address(CpuRegister(RSP), second.GetStackIndex()));
3628 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003629 break;
3630 }
3631
3632 default:
3633 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3634 }
3635}
3636
Calin Juravlebacfec32014-11-14 15:54:36 +00003637void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003638 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003639 LocationSummary* locations =
3640 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003641
3642 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003643 case Primitive::kPrimInt:
3644 case Primitive::kPrimLong: {
3645 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003646 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003647      // Intel uses edx:eax (rdx:rax for longs) as the dividend and puts the remainder in edx/rdx.
3648 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003649      // We need to save the numerator while we tweak RAX and RDX. Since imul forces its results
3650      // into RAX and RDX, it is simpler to keep RDX as the output and to request another temp
3651      // register to hold the numerator when the divisor is a constant.
3652 if (rem->InputAt(1)->IsConstant()) {
3653 locations->AddTemp(Location::RequiresRegister());
3654 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003655 break;
3656 }
3657
3658 case Primitive::kPrimFloat:
3659 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003660 locations->SetInAt(0, Location::Any());
3661 locations->SetInAt(1, Location::Any());
3662 locations->SetOut(Location::RequiresFpuRegister());
3663 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003664 break;
3665 }
3666
3667 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003668 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003669 }
3670}
3671
3672void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3673 Primitive::Type type = rem->GetResultType();
3674 switch (type) {
3675 case Primitive::kPrimInt:
3676 case Primitive::kPrimLong: {
3677 GenerateDivRemIntegral(rem);
3678 break;
3679 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003680 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003681 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003682 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003683 break;
3684 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003685 default:
3686 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3687 }
3688}
3689
Calin Juravled0d48522014-11-04 16:40:20 +00003690void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003691 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3692 ? LocationSummary::kCallOnSlowPath
3693 : LocationSummary::kNoCall;
3694 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003695 locations->SetInAt(0, Location::Any());
3696 if (instruction->HasUses()) {
3697 locations->SetOut(Location::SameAsFirstInput());
3698 }
3699}
3700
3701void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003702 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003703 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3704 codegen_->AddSlowPath(slow_path);
3705
3706 LocationSummary* locations = instruction->GetLocations();
3707 Location value = locations->InAt(0);
3708
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003709 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003710 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003711 case Primitive::kPrimByte:
3712 case Primitive::kPrimChar:
3713 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003714 case Primitive::kPrimInt: {
3715 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003716 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003717 __ j(kEqual, slow_path->GetEntryLabel());
3718 } else if (value.IsStackSlot()) {
3719 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3720 __ j(kEqual, slow_path->GetEntryLabel());
3721 } else {
3722 DCHECK(value.IsConstant()) << value;
3723 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3724 __ jmp(slow_path->GetEntryLabel());
3725 }
3726 }
3727 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003728 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003729 case Primitive::kPrimLong: {
3730 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003731 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003732 __ j(kEqual, slow_path->GetEntryLabel());
3733 } else if (value.IsDoubleStackSlot()) {
3734 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3735 __ j(kEqual, slow_path->GetEntryLabel());
3736 } else {
3737 DCHECK(value.IsConstant()) << value;
3738 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3739 __ jmp(slow_path->GetEntryLabel());
3740 }
3741 }
3742 break;
3743 }
3744 default:
3745 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003746 }
Calin Juravled0d48522014-11-04 16:40:20 +00003747}
3748
Calin Juravle9aec02f2014-11-18 23:06:35 +00003749void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3750 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3751
3752 LocationSummary* locations =
3753 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3754
3755 switch (op->GetResultType()) {
3756 case Primitive::kPrimInt:
3757 case Primitive::kPrimLong: {
3758 locations->SetInAt(0, Location::RequiresRegister());
3759 // The shift count needs to be in CL.
3760 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3761 locations->SetOut(Location::SameAsFirstInput());
3762 break;
3763 }
3764 default:
3765 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3766 }
3767}
3768
3769void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3770 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3771
3772 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003773 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003774 Location second = locations->InAt(1);
3775
3776 switch (op->GetResultType()) {
3777 case Primitive::kPrimInt: {
3778 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003779 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003780 if (op->IsShl()) {
3781 __ shll(first_reg, second_reg);
3782 } else if (op->IsShr()) {
3783 __ sarl(first_reg, second_reg);
3784 } else {
3785 __ shrl(first_reg, second_reg);
3786 }
3787 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003788 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003789 if (op->IsShl()) {
3790 __ shll(first_reg, imm);
3791 } else if (op->IsShr()) {
3792 __ sarl(first_reg, imm);
3793 } else {
3794 __ shrl(first_reg, imm);
3795 }
3796 }
3797 break;
3798 }
3799 case Primitive::kPrimLong: {
3800 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003801 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003802 if (op->IsShl()) {
3803 __ shlq(first_reg, second_reg);
3804 } else if (op->IsShr()) {
3805 __ sarq(first_reg, second_reg);
3806 } else {
3807 __ shrq(first_reg, second_reg);
3808 }
3809 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003810 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003811 if (op->IsShl()) {
3812 __ shlq(first_reg, imm);
3813 } else if (op->IsShr()) {
3814 __ sarq(first_reg, imm);
3815 } else {
3816 __ shrq(first_reg, imm);
3817 }
3818 }
3819 break;
3820 }
3821 default:
3822 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003823 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003824 }
3825}
3826
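// The & kMaxIntShiftDistance / & kMaxLongShiftDistance masking above matches both the Java
// shift semantics and what the hardware does with CL: only the low 5 (32-bit) or 6 (64-bit)
// bits of the count are used. A sketch of the equivalent scalar behaviour:
//
//   int32_t ShlIntSketch(int32_t value, int32_t distance) { return value << (distance & 31); }
//   int64_t ShlLongSketch(int64_t value, int32_t distance) { return value << (distance & 63); }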
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003827void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3828 LocationSummary* locations =
3829 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3830
3831 switch (ror->GetResultType()) {
3832 case Primitive::kPrimInt:
3833 case Primitive::kPrimLong: {
3834 locations->SetInAt(0, Location::RequiresRegister());
3835 // The shift count needs to be in CL (unless it is a constant).
3836 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3837 locations->SetOut(Location::SameAsFirstInput());
3838 break;
3839 }
3840 default:
3841 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3842 UNREACHABLE();
3843 }
3844}
3845
3846void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3847 LocationSummary* locations = ror->GetLocations();
3848 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3849 Location second = locations->InAt(1);
3850
3851 switch (ror->GetResultType()) {
3852 case Primitive::kPrimInt:
3853 if (second.IsRegister()) {
3854 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3855 __ rorl(first_reg, second_reg);
3856 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003857 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003858 __ rorl(first_reg, imm);
3859 }
3860 break;
3861 case Primitive::kPrimLong:
3862 if (second.IsRegister()) {
3863 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3864 __ rorq(first_reg, second_reg);
3865 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003866 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003867 __ rorq(first_reg, imm);
3868 }
3869 break;
3870 default:
3871 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3872 UNREACHABLE();
3873 }
3874}
3875
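// rorl/rorq rotate rather than shift, so no bits are lost. A portable 32-bit sketch of the
// operation the constant path encodes (distance masked as above):
//
//   uint32_t Ror32Sketch(uint32_t value, uint32_t distance) {
//     distance &= 31;
//     return (value >> distance) | (value << ((32 - distance) & 31));
//   }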
Calin Juravle9aec02f2014-11-18 23:06:35 +00003876void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3877 HandleShift(shl);
3878}
3879
3880void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3881 HandleShift(shl);
3882}
3883
3884void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3885 HandleShift(shr);
3886}
3887
3888void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3889 HandleShift(shr);
3890}
3891
3892void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3893 HandleShift(ushr);
3894}
3895
3896void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3897 HandleShift(ushr);
3898}
3899
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003900void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003901 LocationSummary* locations =
3902 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003903 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003904 if (instruction->IsStringAlloc()) {
3905 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3906 } else {
3907 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3908 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3909 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003910 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003911}
3912
3913void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003914  // Note: if heap poisoning is enabled, the entry point takes care
3915  // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003916 if (instruction->IsStringAlloc()) {
3917 // String is allocated through StringFactory. Call NewEmptyString entry point.
3918 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3919 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3920 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3921 __ call(Address(temp, code_offset.SizeValue()));
3922 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3923 } else {
3924 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3925 instruction,
3926 instruction->GetDexPc(),
3927 nullptr);
3928 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3929 DCHECK(!codegen_->IsLeafMethod());
3930 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003931}
3932
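// Note on the gs()-> load above: on x86-64 ART keeps the current Thread* in the GS segment,
// so a gs-relative absolute address indexes the thread-local entrypoint table. Conceptually
// (a rough sketch, not the exact runtime types):
//
//   // temp = current Thread's pNewEmptyString quick entrypoint (an ArtMethod*)
//   // call temp->entry_point_from_quick_compiled_code_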
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003933void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3934 LocationSummary* locations =
3935 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3936 InvokeRuntimeCallingConvention calling_convention;
3937 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003938 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003939 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003940 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003941}
3942
3943void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3944 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003945 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3946 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003947  // Note: if heap poisoning is enabled, the entry point takes care
3948  // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003949 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3950 instruction,
3951 instruction->GetDexPc(),
3952 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003953 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003954
3955 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003956}
3957
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003958void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003959 LocationSummary* locations =
3960 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003961 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3962 if (location.IsStackSlot()) {
3963 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3964 } else if (location.IsDoubleStackSlot()) {
3965 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3966 }
3967 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003968}
3969
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003970void InstructionCodeGeneratorX86_64::VisitParameterValue(
3971 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003972 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003973}
3974
3975void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3976 LocationSummary* locations =
3977 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3978 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3979}
3980
3981void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
3982 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3983 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003984}
3985
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003986void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3987 LocationSummary* locations =
3988 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3989 locations->SetInAt(0, Location::RequiresRegister());
3990 locations->SetOut(Location::RequiresRegister());
3991}
3992
3993void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3994 LocationSummary* locations = instruction->GetLocations();
  uint32_t method_offset = 0;
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    // The IMT entry lives off the ImTable, not off the class itself, so load the
    // ImTable pointer first and then index into it.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
4008}
4009
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004010void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004011 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004012 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004013 locations->SetInAt(0, Location::RequiresRegister());
4014 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004015}
4016
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004017void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4018 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004019 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4020 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004021 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004022 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004023 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004024 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004025 break;
4026
4027 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004028 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004029 break;
4030
4031 default:
4032 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4033 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004034}
4035
David Brazdil66d126e2015-04-03 16:02:44 +01004036void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4037 LocationSummary* locations =
4038 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4039 locations->SetInAt(0, Location::RequiresRegister());
4040 locations->SetOut(Location::SameAsFirstInput());
4041}
4042
4043void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004044 LocationSummary* locations = bool_not->GetLocations();
4045 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4046 locations->Out().AsRegister<CpuRegister>().AsRegister());
4047 Location out = locations->Out();
4048 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4049}
4050
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004051void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004052 LocationSummary* locations =
4053 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004054 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004055 locations->SetInAt(i, Location::Any());
4056 }
4057 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004058}
4059
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004060void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004061 LOG(FATAL) << "Unimplemented";
4062}
4063
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004064void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004065 /*
4066   * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004067 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004068 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4069 */
4070 switch (kind) {
4071 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004072 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004073 break;
4074 }
4075 case MemBarrierKind::kAnyStore:
4076 case MemBarrierKind::kLoadAny:
4077 case MemBarrierKind::kStoreStore: {
4078 // nop
4079 break;
4080 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004081 case MemBarrierKind::kNTStoreStore:
4082 // Non-Temporal Store/Store needs an explicit fence.
4083 MemoryFence(/* non-temporal */ true);
4084 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004085 }
4086}
4087
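// On x86-64's TSO model only the StoreLoad (kAnyAny) case needs an actual fence instruction;
// the other kinds only need to keep the compiler from reordering. A rough std::atomic analogue
// of the mapping above (a sketch, not how ART itself is built):
//
//   #include <atomic>
//   void AnyAnyBarrierSketch()   { std::atomic_thread_fence(std::memory_order_seq_cst); }  // mfence / lock add
//   void LoadAnyBarrierSketch()  { std::atomic_thread_fence(std::memory_order_acquire); }  // no code emitted
//   void AnyStoreBarrierSketch() { std::atomic_thread_fence(std::memory_order_release); }  // no code emitted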
4088void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4089 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4090
Roland Levillain0d5a2812015-11-13 10:07:31 +00004091 bool object_field_get_with_read_barrier =
4092 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004093 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004094 new (GetGraph()->GetArena()) LocationSummary(instruction,
4095 object_field_get_with_read_barrier ?
4096 LocationSummary::kCallOnSlowPath :
4097 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004098 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004099 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4100 locations->SetOut(Location::RequiresFpuRegister());
4101 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004102 // The output overlaps for an object field get when read barriers
4103 // are enabled: we do not want the move to overwrite the object's
4104 // location, as we need it to emit the read barrier.
4105 locations->SetOut(
4106 Location::RequiresRegister(),
4107 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004108 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004109 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4110 // We need a temporary register for the read barrier marking slow
4111 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4112 locations->AddTemp(Location::RequiresRegister());
4113 }
Calin Juravle52c48962014-12-16 17:02:57 +00004114}
4115
4116void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4117 const FieldInfo& field_info) {
4118 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4119
4120 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004121 Location base_loc = locations->InAt(0);
4122 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004123 Location out = locations->Out();
4124 bool is_volatile = field_info.IsVolatile();
4125 Primitive::Type field_type = field_info.GetFieldType();
4126 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4127
4128 switch (field_type) {
4129 case Primitive::kPrimBoolean: {
4130 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4131 break;
4132 }
4133
4134 case Primitive::kPrimByte: {
4135 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4136 break;
4137 }
4138
4139 case Primitive::kPrimShort: {
4140 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4141 break;
4142 }
4143
4144 case Primitive::kPrimChar: {
4145 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4146 break;
4147 }
4148
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004149 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004150 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4151 break;
4152 }
4153
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004154 case Primitive::kPrimNot: {
4155 // /* HeapReference<Object> */ out = *(base + offset)
4156 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4157 Location temp_loc = locations->GetTemp(0);
4158 // Note that a potential implicit null check is handled in this
4159        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4160 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4161 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4162 if (is_volatile) {
4163 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4164 }
4165 } else {
4166 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4167 codegen_->MaybeRecordImplicitNullCheck(instruction);
4168 if (is_volatile) {
4169 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4170 }
4171 // If read barriers are enabled, emit read barriers other than
4172 // Baker's using a slow path (and also unpoison the loaded
4173 // reference, if heap poisoning is enabled).
4174 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4175 }
4176 break;
4177 }
4178
Calin Juravle52c48962014-12-16 17:02:57 +00004179 case Primitive::kPrimLong: {
4180 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4181 break;
4182 }
4183
4184 case Primitive::kPrimFloat: {
4185 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4186 break;
4187 }
4188
4189 case Primitive::kPrimDouble: {
4190 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4191 break;
4192 }
4193
4194 case Primitive::kPrimVoid:
4195 LOG(FATAL) << "Unreachable type " << field_type;
4196 UNREACHABLE();
4197 }
4198
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004199 if (field_type == Primitive::kPrimNot) {
4200 // Potential implicit null checks, in the case of reference
4201 // fields, are handled in the previous switch statement.
4202 } else {
4203 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004204 }
Roland Levillain4d027112015-07-01 15:41:14 +01004205
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004206 if (is_volatile) {
4207 if (field_type == Primitive::kPrimNot) {
4208 // Memory barriers, in the case of references, are also handled
4209 // in the previous switch statement.
4210 } else {
4211 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4212 }
Roland Levillain4d027112015-07-01 15:41:14 +01004213 }
Calin Juravle52c48962014-12-16 17:02:57 +00004214}
4215
4216void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4217 const FieldInfo& field_info) {
4218 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4219
4220 LocationSummary* locations =
4221 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004222 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004223 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004224 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004225 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004226
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004227 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004228 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004229 if (is_volatile) {
4230 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4231 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4232 } else {
4233 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4234 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004235 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004236 if (is_volatile) {
4237 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4238 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4239 } else {
4240 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4241 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004242 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004243 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004244 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004245 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004246 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004247 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4248 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004249 locations->AddTemp(Location::RequiresRegister());
4250 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004251}
4252
Calin Juravle52c48962014-12-16 17:02:57 +00004253void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004254 const FieldInfo& field_info,
4255 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004256 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4257
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004258 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004259 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4260 Location value = locations->InAt(1);
4261 bool is_volatile = field_info.IsVolatile();
4262 Primitive::Type field_type = field_info.GetFieldType();
4263 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4264
4265 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004266 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004267 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004268
Mark Mendellea5af682015-10-22 17:35:49 -04004269 bool maybe_record_implicit_null_check_done = false;
4270
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004271 switch (field_type) {
4272 case Primitive::kPrimBoolean:
4273 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004274 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004275 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004276 __ movb(Address(base, offset), Immediate(v));
4277 } else {
4278 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4279 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004280 break;
4281 }
4282
4283 case Primitive::kPrimShort:
4284 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004285 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004286 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004287 __ movw(Address(base, offset), Immediate(v));
4288 } else {
4289 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4290 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004291 break;
4292 }
4293
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004294 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004295 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004296 if (value.IsConstant()) {
4297 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004298 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4299 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4300 // Note: if heap poisoning is enabled, no need to poison
4301 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004302 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004303 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004304 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4305 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4306 __ movl(temp, value.AsRegister<CpuRegister>());
4307 __ PoisonHeapReference(temp);
4308 __ movl(Address(base, offset), temp);
4309 } else {
4310 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4311 }
Mark Mendell40741f32015-04-20 22:10:34 -04004312 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004313 break;
4314 }
4315
4316 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004317 if (value.IsConstant()) {
4318 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004319 codegen_->MoveInt64ToAddress(Address(base, offset),
4320 Address(base, offset + sizeof(int32_t)),
4321 v,
4322 instruction);
4323 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004324 } else {
4325 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4326 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004327 break;
4328 }
4329
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004330 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004331 if (value.IsConstant()) {
4332 int32_t v =
4333 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4334 __ movl(Address(base, offset), Immediate(v));
4335 } else {
4336 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4337 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004338 break;
4339 }
4340
4341 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004342 if (value.IsConstant()) {
4343 int64_t v =
4344 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4345 codegen_->MoveInt64ToAddress(Address(base, offset),
4346 Address(base, offset + sizeof(int32_t)),
4347 v,
4348 instruction);
4349 maybe_record_implicit_null_check_done = true;
4350 } else {
4351 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4352 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004353 break;
4354 }
4355
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004356 case Primitive::kPrimVoid:
4357 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004358 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004359 }
Calin Juravle52c48962014-12-16 17:02:57 +00004360
Mark Mendellea5af682015-10-22 17:35:49 -04004361 if (!maybe_record_implicit_null_check_done) {
4362 codegen_->MaybeRecordImplicitNullCheck(instruction);
4363 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004364
4365 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4366 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4367 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004368 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004369 }
4370
Calin Juravle52c48962014-12-16 17:02:57 +00004371 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004372 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004373 }
4374}
4375
4376void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4377 HandleFieldSet(instruction, instruction->GetFieldInfo());
4378}
4379
4380void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004381 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004382}
4383
4384void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004385 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004386}
4387
4388void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004389 HandleFieldGet(instruction, instruction->GetFieldInfo());
4390}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004391
Calin Juravle52c48962014-12-16 17:02:57 +00004392void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4393 HandleFieldGet(instruction);
4394}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004395
Calin Juravle52c48962014-12-16 17:02:57 +00004396void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4397 HandleFieldGet(instruction, instruction->GetFieldInfo());
4398}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004399
Calin Juravle52c48962014-12-16 17:02:57 +00004400void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4401 HandleFieldSet(instruction, instruction->GetFieldInfo());
4402}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004403
Calin Juravle52c48962014-12-16 17:02:57 +00004404void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004405 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004406}
4407
Calin Juravlee460d1d2015-09-29 04:52:17 +01004408void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4409 HUnresolvedInstanceFieldGet* instruction) {
4410 FieldAccessCallingConventionX86_64 calling_convention;
4411 codegen_->CreateUnresolvedFieldLocationSummary(
4412 instruction, instruction->GetFieldType(), calling_convention);
4413}
4414
4415void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4416 HUnresolvedInstanceFieldGet* instruction) {
4417 FieldAccessCallingConventionX86_64 calling_convention;
4418 codegen_->GenerateUnresolvedFieldAccess(instruction,
4419 instruction->GetFieldType(),
4420 instruction->GetFieldIndex(),
4421 instruction->GetDexPc(),
4422 calling_convention);
4423}
4424
4425void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4426 HUnresolvedInstanceFieldSet* instruction) {
4427 FieldAccessCallingConventionX86_64 calling_convention;
4428 codegen_->CreateUnresolvedFieldLocationSummary(
4429 instruction, instruction->GetFieldType(), calling_convention);
4430}
4431
4432void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4433 HUnresolvedInstanceFieldSet* instruction) {
4434 FieldAccessCallingConventionX86_64 calling_convention;
4435 codegen_->GenerateUnresolvedFieldAccess(instruction,
4436 instruction->GetFieldType(),
4437 instruction->GetFieldIndex(),
4438 instruction->GetDexPc(),
4439 calling_convention);
4440}
4441
4442void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4443 HUnresolvedStaticFieldGet* instruction) {
4444 FieldAccessCallingConventionX86_64 calling_convention;
4445 codegen_->CreateUnresolvedFieldLocationSummary(
4446 instruction, instruction->GetFieldType(), calling_convention);
4447}
4448
4449void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4450 HUnresolvedStaticFieldGet* instruction) {
4451 FieldAccessCallingConventionX86_64 calling_convention;
4452 codegen_->GenerateUnresolvedFieldAccess(instruction,
4453 instruction->GetFieldType(),
4454 instruction->GetFieldIndex(),
4455 instruction->GetDexPc(),
4456 calling_convention);
4457}
4458
4459void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4460 HUnresolvedStaticFieldSet* instruction) {
4461 FieldAccessCallingConventionX86_64 calling_convention;
4462 codegen_->CreateUnresolvedFieldLocationSummary(
4463 instruction, instruction->GetFieldType(), calling_convention);
4464}
4465
4466void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4467 HUnresolvedStaticFieldSet* instruction) {
4468 FieldAccessCallingConventionX86_64 calling_convention;
4469 codegen_->GenerateUnresolvedFieldAccess(instruction,
4470 instruction->GetFieldType(),
4471 instruction->GetFieldIndex(),
4472 instruction->GetDexPc(),
4473 calling_convention);
4474}
4475
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004476void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004477 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4478 ? LocationSummary::kCallOnSlowPath
4479 : LocationSummary::kNoCall;
4480 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
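  // An implicit null check dereferences the object, so it needs it in a
  // register; an explicit check can also test a stack slot or a constant.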
4481 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004482 ? Location::RequiresRegister()
4483 : Location::Any();
4484 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004485 if (instruction->HasUses()) {
4486 locations->SetOut(Location::SameAsFirstInput());
4487 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004488}
4489
Calin Juravle2ae48182016-03-16 14:05:09 +00004490void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4491 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004492 return;
4493 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004494 LocationSummary* locations = instruction->GetLocations();
4495 Location obj = locations->InAt(0);
4496
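  // This load faults if `obj` is null; RAX is only a dummy source operand for
  // testl and is left unchanged. The RecordPcInfo call below lets the runtime
  // map the faulting PC back to this instruction.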
4497 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004498 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004499}
4500
Calin Juravle2ae48182016-03-16 14:05:09 +00004501void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004502 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004503 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004504
4505 LocationSummary* locations = instruction->GetLocations();
4506 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004507
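  // The object may live in a register, on the stack, or be a constant; a
  // constant input can only be the null constant, so that case jumps straight
  // to the slow path.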
4508 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004509 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004510 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004511 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004512 } else {
4513 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004514 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004515 __ jmp(slow_path->GetEntryLabel());
4516 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004517 }
4518 __ j(kEqual, slow_path->GetEntryLabel());
4519}
4520
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004521void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004522 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004523}
4524
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004525void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004526 bool object_array_get_with_read_barrier =
4527 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004528 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004529 new (GetGraph()->GetArena()) LocationSummary(instruction,
4530 object_array_get_with_read_barrier ?
4531 LocationSummary::kCallOnSlowPath :
4532 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004533 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004534 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004535 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4536 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4537 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004538 // The output overlaps for an object array get when read barriers
4539 // are enabled: we do not want the move to overwrite the array's
4540 // location, as we need it to emit the read barrier.
4541 locations->SetOut(
4542 Location::RequiresRegister(),
4543 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004544 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004545 // We need a temporary register for the read barrier marking slow
4546 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4547 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4548 locations->AddTemp(Location::RequiresRegister());
4549 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004550}
4551
4552void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4553 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004554 Location obj_loc = locations->InAt(0);
4555 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004556 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004557 Location out_loc = locations->Out();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004558
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004559 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004560 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004561 case Primitive::kPrimBoolean: {
4562 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004563 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004564 if (index.IsConstant()) {
4565 __ movzxb(out, Address(obj,
4566 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4567 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004568 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004569 }
4570 break;
4571 }
4572
4573 case Primitive::kPrimByte: {
4574 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004575 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004576 if (index.IsConstant()) {
4577 __ movsxb(out, Address(obj,
4578 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4579 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004580 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004581 }
4582 break;
4583 }
4584
4585 case Primitive::kPrimShort: {
4586 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004587 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004588 if (index.IsConstant()) {
4589 __ movsxw(out, Address(obj,
4590 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4591 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004592 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004593 }
4594 break;
4595 }
4596
4597 case Primitive::kPrimChar: {
4598 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004599 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004600 if (index.IsConstant()) {
4601 __ movzxw(out, Address(obj,
4602 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4603 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004604 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004605 }
4606 break;
4607 }
4608
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004609 case Primitive::kPrimInt: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004610 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004611 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004612 if (index.IsConstant()) {
4613 __ movl(out, Address(obj,
4614 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4615 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004616 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004617 }
4618 break;
4619 }
4620
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004621 case Primitive::kPrimNot: {
4622 static_assert(
4623 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4624 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4625 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4626 // /* HeapReference<Object> */ out =
4627 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4628 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4629 Location temp = locations->GetTemp(0);
4630 // Note that a potential implicit null check is handled in this
4631        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4632 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4633 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4634 } else {
4635 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4636 if (index.IsConstant()) {
4637 uint32_t offset =
4638 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4639 __ movl(out, Address(obj, offset));
4640 codegen_->MaybeRecordImplicitNullCheck(instruction);
4641 // If read barriers are enabled, emit read barriers other than
4642 // Baker's using a slow path (and also unpoison the loaded
4643 // reference, if heap poisoning is enabled).
4644 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4645 } else {
4646 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4647 codegen_->MaybeRecordImplicitNullCheck(instruction);
4648 // If read barriers are enabled, emit read barriers other than
4649 // Baker's using a slow path (and also unpoison the loaded
4650 // reference, if heap poisoning is enabled).
4651 codegen_->MaybeGenerateReadBarrierSlow(
4652 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4653 }
4654 }
4655 break;
4656 }
4657
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004658 case Primitive::kPrimLong: {
4659 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004660 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004661 if (index.IsConstant()) {
4662 __ movq(out, Address(obj,
4663 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4664 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004665 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004666 }
4667 break;
4668 }
4669
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004670 case Primitive::kPrimFloat: {
4671 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004672 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004673 if (index.IsConstant()) {
4674 __ movss(out, Address(obj,
4675 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4676 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004677 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004678 }
4679 break;
4680 }
4681
4682 case Primitive::kPrimDouble: {
4683 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004684 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004685 if (index.IsConstant()) {
4686 __ movsd(out, Address(obj,
4687 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4688 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004689 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004690 }
4691 break;
4692 }
4693
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004694 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004695 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004696 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004697 }
Roland Levillain4d027112015-07-01 15:41:14 +01004698
4699 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004700 // Potential implicit null checks, in the case of reference
4701 // arrays, are handled in the previous switch statement.
4702 } else {
4703 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004704 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004705}
4706
4707void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004708 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004709
4710 bool needs_write_barrier =
4711 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004712 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004713 bool object_array_set_with_read_barrier =
4714 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004715
Nicolas Geoffray39468442014-09-02 15:17:15 +01004716 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004717 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004718 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004719 LocationSummary::kCallOnSlowPath :
4720 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004721
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004722 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004723 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4724 if (Primitive::IsFloatingPointType(value_type)) {
4725 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004726 } else {
4727 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4728 }
4729
4730 if (needs_write_barrier) {
4731 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004732
4733 // This first temporary register is possibly used for heap
4734 // reference poisoning and/or read barrier emission too.
4735 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004736 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004737 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004738}
4739
4740void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4741 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004742 Location array_loc = locations->InAt(0);
4743 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004744 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004745 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004746 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004747 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004748 bool needs_write_barrier =
4749 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004750 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4751 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4752 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004753
4754 switch (value_type) {
4755 case Primitive::kPrimBoolean:
4756 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004757 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4758 Address address = index.IsConstant()
4759 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4760 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4761 if (value.IsRegister()) {
4762 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004763 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004764 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004765 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004766 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004767 break;
4768 }
4769
4770 case Primitive::kPrimShort:
4771 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004772 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4773 Address address = index.IsConstant()
4774 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4775 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4776 if (value.IsRegister()) {
4777 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004778 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004779 DCHECK(value.IsConstant()) << value;
4780 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004781 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004782 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004783 break;
4784 }
4785
4786 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004787 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4788 Address address = index.IsConstant()
4789 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4790 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004791
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004792 if (!value.IsRegister()) {
4793 // Just setting null.
4794 DCHECK(instruction->InputAt(2)->IsNullConstant());
4795 DCHECK(value.IsConstant()) << value;
4796 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004797 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004798 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004799 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004800 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004801 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004802
4803 DCHECK(needs_write_barrier);
4804 CpuRegister register_value = value.AsRegister<CpuRegister>();
4805 NearLabel done, not_null, do_put;
4806 SlowPathCode* slow_path = nullptr;
4807 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004808 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004809 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4810 codegen_->AddSlowPath(slow_path);
4811 if (instruction->GetValueCanBeNull()) {
4812 __ testl(register_value, register_value);
4813 __ j(kNotEqual, &not_null);
4814 __ movl(address, Immediate(0));
4815 codegen_->MaybeRecordImplicitNullCheck(instruction);
4816 __ jmp(&done);
4817 __ Bind(&not_null);
4818 }
4819
Roland Levillain0d5a2812015-11-13 10:07:31 +00004820 if (kEmitCompilerReadBarrier) {
4821 // When read barriers are enabled, the type checking
4822 // instrumentation requires two read barriers:
4823 //
4824 // __ movl(temp2, temp);
4825 // // /* HeapReference<Class> */ temp = temp->component_type_
4826 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004827 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004828 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4829 //
4830 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4831 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004832 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004833 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4834 //
4835 // __ cmpl(temp, temp2);
4836 //
4837 // However, the second read barrier may trash `temp`, as it
4838 // is a temporary register, and as such would not be saved
4839 // along with live registers before calling the runtime (nor
4840 // restored afterwards). So in this case, we bail out and
4841 // delegate the work to the array set slow path.
4842 //
4843 // TODO: Extend the register allocator to support a new
4844 // "(locally) live temp" location so as to avoid always
4845 // going into the slow path when read barriers are enabled.
4846 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004847 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004848 // /* HeapReference<Class> */ temp = array->klass_
4849 __ movl(temp, Address(array, class_offset));
4850 codegen_->MaybeRecordImplicitNullCheck(instruction);
4851 __ MaybeUnpoisonHeapReference(temp);
4852
4853 // /* HeapReference<Class> */ temp = temp->component_type_
4854 __ movl(temp, Address(temp, component_offset));
4855 // If heap poisoning is enabled, no need to unpoison `temp`
4856 // nor the object reference in `register_value->klass`, as
4857        // nor the object reference in `register_value->klass_`, as
4858 __ cmpl(temp, Address(register_value, class_offset));
4859
4860 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4861 __ j(kEqual, &do_put);
4862 // If heap poisoning is enabled, the `temp` reference has
4863 // not been unpoisoned yet; unpoison it now.
4864 __ MaybeUnpoisonHeapReference(temp);
4865
4866 // /* HeapReference<Class> */ temp = temp->super_class_
4867 __ movl(temp, Address(temp, super_offset));
4868 // If heap poisoning is enabled, no need to unpoison
4869 // `temp`, as we are comparing against null below.
4870 __ testl(temp, temp);
4871 __ j(kNotEqual, slow_path->GetEntryLabel());
4872 __ Bind(&do_put);
4873 } else {
4874 __ j(kNotEqual, slow_path->GetEntryLabel());
4875 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004876 }
4877 }
4878
4879 if (kPoisonHeapReferences) {
4880 __ movl(temp, register_value);
4881 __ PoisonHeapReference(temp);
4882 __ movl(address, temp);
4883 } else {
4884 __ movl(address, register_value);
4885 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004886 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004887 codegen_->MaybeRecordImplicitNullCheck(instruction);
4888 }
4889
4890 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4891 codegen_->MarkGCCard(
4892 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4893 __ Bind(&done);
4894
4895 if (slow_path != nullptr) {
4896 __ Bind(slow_path->GetExitLabel());
4897 }
4898
4899 break;
4900 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004901
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004902 case Primitive::kPrimInt: {
4903 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4904 Address address = index.IsConstant()
4905 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4906 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4907 if (value.IsRegister()) {
4908 __ movl(address, value.AsRegister<CpuRegister>());
4909 } else {
4910 DCHECK(value.IsConstant()) << value;
4911 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4912 __ movl(address, Immediate(v));
4913 }
4914 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004915 break;
4916 }
4917
4918 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004919 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4920 Address address = index.IsConstant()
4921 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4922 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4923 if (value.IsRegister()) {
4924 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004925 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004926 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004927 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004928 Address address_high = index.IsConstant()
4929 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4930 offset + sizeof(int32_t))
4931 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4932 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004933 }
4934 break;
4935 }
4936
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004937 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004938 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4939 Address address = index.IsConstant()
4940 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4941 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004942 if (value.IsFpuRegister()) {
4943 __ movss(address, value.AsFpuRegister<XmmRegister>());
4944 } else {
4945 DCHECK(value.IsConstant());
4946 int32_t v =
4947 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4948 __ movl(address, Immediate(v));
4949 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004950 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004951 break;
4952 }
4953
4954 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004955 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4956 Address address = index.IsConstant()
4957 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4958 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004959 if (value.IsFpuRegister()) {
4960 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4961 codegen_->MaybeRecordImplicitNullCheck(instruction);
4962 } else {
4963 int64_t v =
4964 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4965 Address address_high = index.IsConstant()
4966 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4967 offset + sizeof(int32_t))
4968 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4969 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4970 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004971 break;
4972 }
4973
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004974 case Primitive::kPrimVoid:
4975 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004976 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004977 }
4978}
4979
4980void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004981 LocationSummary* locations =
4982 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004983 locations->SetInAt(0, Location::RequiresRegister());
4984 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004985}
4986
4987void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
4988 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01004989 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00004990 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
4991 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004992 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004993 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004994}
4995
4996void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004997 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4998 ? LocationSummary::kCallOnSlowPath
4999 : LocationSummary::kNoCall;
5000 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005001 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04005002 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005003 if (instruction->HasUses()) {
5004 locations->SetOut(Location::SameAsFirstInput());
5005 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005006}
5007
5008void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5009 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005010 Location index_loc = locations->InAt(0);
5011 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07005012 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005013 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005014
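  // The comparisons below are unsigned: a negative index, read as an unsigned
  // value, is larger than any valid length, so kAboveEqual/kBelowEqual also
  // send index < 0 to the slow path.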
Mark Mendell99dbd682015-04-22 16:18:52 -04005015 if (length_loc.IsConstant()) {
5016 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5017 if (index_loc.IsConstant()) {
5018      // BCE will remove the bounds check if we are guaranteed to pass.
5019 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5020 if (index < 0 || index >= length) {
5021 codegen_->AddSlowPath(slow_path);
5022 __ jmp(slow_path->GetEntryLabel());
5023 } else {
5024 // Some optimization after BCE may have generated this, and we should not
5025 // generate a bounds check if it is a valid range.
5026 }
5027 return;
5028 }
5029
5030    // The length is the constant, so it must be the immediate (second) operand of cmpl; reverse the jump condition accordingly.
5031 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5032 __ cmpl(index_reg, Immediate(length));
5033 codegen_->AddSlowPath(slow_path);
5034 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005035 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04005036 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5037 if (index_loc.IsConstant()) {
5038 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5039 __ cmpl(length, Immediate(value));
5040 } else {
5041 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5042 }
5043 codegen_->AddSlowPath(slow_path);
5044 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005045 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005046}
5047
5048void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5049 CpuRegister card,
5050 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005051 CpuRegister value,
5052 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005053 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005054 if (value_can_be_null) {
5055 __ testl(value, value);
5056 __ j(kEqual, &is_null);
5057 }
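  // Dirty the card covering `object`: `card` holds the card table address
  // loaded from thread-local storage, `object >> kCardShift` indexes it, and
  // the byte stored is the low byte of `card`, which by the card table's
  // construction equals the dirty-card value.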
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005058 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5059 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005060 __ movq(temp, object);
5061 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005062 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005063 if (value_can_be_null) {
5064 __ Bind(&is_null);
5065 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005066}
5067
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005068void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005069 LOG(FATAL) << "Unimplemented";
5070}
5071
5072void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005073 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5074}
5075
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005076void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5077 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5078}
5079
5080void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005081 HBasicBlock* block = instruction->GetBlock();
5082 if (block->GetLoopInformation() != nullptr) {
5083 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5084 // The back edge will generate the suspend check.
5085 return;
5086 }
5087 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5088 // The goto will generate the suspend check.
5089 return;
5090 }
5091 GenerateSuspendCheck(instruction, nullptr);
5092}
5093
5094void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5095 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005096 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005097 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5098 if (slow_path == nullptr) {
5099 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5100 instruction->SetSlowPath(slow_path);
5101 codegen_->AddSlowPath(slow_path);
5102 if (successor != nullptr) {
5103 DCHECK(successor->IsLoopHeader());
5104 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5105 }
5106 } else {
5107 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5108 }
5109
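  // Poll the thread's flag word: any pending flag (e.g. a suspend request)
  // makes the 16-bit compare non-zero and sends control to the slow path.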
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005110 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5111 /* no_rip */ true),
5112 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005113 if (successor == nullptr) {
5114 __ j(kNotEqual, slow_path->GetEntryLabel());
5115 __ Bind(slow_path->GetReturnLabel());
5116 } else {
5117 __ j(kEqual, codegen_->GetLabelOf(successor));
5118 __ jmp(slow_path->GetEntryLabel());
5119 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005120}
5121
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005122X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5123 return codegen_->GetAssembler();
5124}
5125
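// Emits a single move of the parallel move: register, FP register and stack
// slot transfers, with constants materialized in place; stack-to-stack copies
// go through the TMP register.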
5126void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005127 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005128 Location source = move->GetSource();
5129 Location destination = move->GetDestination();
5130
5131 if (source.IsRegister()) {
5132 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005133 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005134 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005135 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005136 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005137 } else {
5138 DCHECK(destination.IsDoubleStackSlot());
5139 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005140 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005141 }
5142 } else if (source.IsStackSlot()) {
5143 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005144 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005145 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005146 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005147 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005148 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005149 } else {
5150 DCHECK(destination.IsStackSlot());
5151 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5152 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5153 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005154 } else if (source.IsDoubleStackSlot()) {
5155 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005156 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005157 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005158 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005159 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5160 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005161 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005162 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005163 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5164 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5165 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005166 } else if (source.IsConstant()) {
5167 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005168 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5169 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005170 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005171 if (value == 0) {
5172 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5173 } else {
5174 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5175 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005176 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005177 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005178 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005179 }
5180 } else if (constant->IsLongConstant()) {
5181 int64_t value = constant->AsLongConstant()->GetValue();
5182 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005183 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005184 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005185 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005186 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005187 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005188 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005189 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005190 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005191 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005192 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005193 } else {
5194 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005195 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005196 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5197 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005198 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005199 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005200 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005201 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005202 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005203 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005204 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005205 } else {
5206 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005207 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005208 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005209 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005210 } else if (source.IsFpuRegister()) {
5211 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005212 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005213 } else if (destination.IsStackSlot()) {
5214 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005215 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005216 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005217 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005218 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005219 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005220 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005221 }
5222}
5223
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005224void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005225 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005226 __ movl(Address(CpuRegister(RSP), mem), reg);
5227 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005228}
5229
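// Swaps two 32-bit stack slots via TMP and a scratch register; if the scratch
// register had to be spilled, the slot offsets are rebased by one word to
// account for the extra push.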
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005230void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005231 ScratchRegisterScope ensure_scratch(
5232 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5233
5234 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5235 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5236 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5237 Address(CpuRegister(RSP), mem2 + stack_offset));
5238 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5239 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5240 CpuRegister(ensure_scratch.GetRegister()));
5241}
5242
Mark Mendell8a1c7282015-06-29 15:41:28 -04005243void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5244 __ movq(CpuRegister(TMP), reg1);
5245 __ movq(reg1, reg2);
5246 __ movq(reg2, CpuRegister(TMP));
5247}
5248
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005249void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5250 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5251 __ movq(Address(CpuRegister(RSP), mem), reg);
5252 __ movq(reg, CpuRegister(TMP));
5253}
5254
5255void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5256 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005257 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005258
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005259 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5260 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5261 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5262 Address(CpuRegister(RSP), mem2 + stack_offset));
5263 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5264 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5265 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005266}
5267
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005268void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5269 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5270 __ movss(Address(CpuRegister(RSP), mem), reg);
5271 __ movd(reg, CpuRegister(TMP));
5272}
5273
5274void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5275 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5276 __ movsd(Address(CpuRegister(RSP), mem), reg);
5277 __ movd(reg, CpuRegister(TMP));
5278}
5279
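// Swaps the source and destination of one move, dispatching on the location
// kinds; an XMM/XMM swap stages one value in TMP via movd, and unsupported
// combinations abort.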
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005280void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005281 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005282 Location source = move->GetSource();
5283 Location destination = move->GetDestination();
5284
5285 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005286 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005287 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005288 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005289 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005290 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005291 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005292 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5293 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005294 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005295 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005296 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005297 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5298 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005299 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005300 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5301 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5302 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005303 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005304 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005305 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005306 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005307 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005308 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005309 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005310 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005311 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005312 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005313 }
5314}
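// Example dispatches for EmitSwap above (illustrative only):
//   long in RDX   <-> long in RSI          : Exchange64(reg, reg), staged through TMP
//   int in EAX    <-> int stack slot       : Exchange32(reg, mem)
//   double XMM0   <-> double stack slot    : Exchange64(xmm, mem)
//   float XMM0    <-> float XMM1           : movd/movaps shuffle through TMP (inline above)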
5315
5316
5317void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5318 __ pushq(CpuRegister(reg));
5319}
5320
5321
5322void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5323 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005324}
5325
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005326void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005327 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005328 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5329 Immediate(mirror::Class::kStatusInitialized));
5330 __ j(kLess, slow_path->GetEntryLabel());
5331 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005332 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005333}
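// Illustrative only: the initialization check above reduces to one compare
// of the class status word and a conditional branch to the clinit slow path:
//   cmpl [class_reg + mirror::Class::StatusOffset()], kStatusInitialized
//   jl   <LoadClassSlowPathX86_64 entry>
//  slow_path_exit: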
5334
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005335HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5336 HLoadClass::LoadKind desired_class_load_kind) {
5337 if (kEmitCompilerReadBarrier) {
5338 switch (desired_class_load_kind) {
5339 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5340 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5341 case HLoadClass::LoadKind::kBootImageAddress:
5342 // TODO: Implement for read barrier.
5343 return HLoadClass::LoadKind::kDexCacheViaMethod;
5344 default:
5345 break;
5346 }
5347 }
5348 switch (desired_class_load_kind) {
5349 case HLoadClass::LoadKind::kReferrersClass:
5350 break;
5351 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5352 DCHECK(!GetCompilerOptions().GetCompilePic());
5353 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5354 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5355 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5356 DCHECK(GetCompilerOptions().GetCompilePic());
5357 break;
5358 case HLoadClass::LoadKind::kBootImageAddress:
5359 break;
5360 case HLoadClass::LoadKind::kDexCacheAddress:
5361 DCHECK(Runtime::Current()->UseJitCompilation());
5362 break;
5363 case HLoadClass::LoadKind::kDexCachePcRelative:
5364 DCHECK(!Runtime::Current()->UseJitCompilation());
5365 break;
5366 case HLoadClass::LoadKind::kDexCacheViaMethod:
5367 break;
5368 }
5369 return desired_class_load_kind;
5370}
5371
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005372void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005373 if (cls->NeedsAccessCheck()) {
5374 InvokeRuntimeCallingConvention calling_convention;
5375 CodeGenerator::CreateLoadClassLocationSummary(
5376 cls,
5377 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5378 Location::RegisterLocation(RAX),
5379 /* code_generator_supports_read_barrier */ true);
5380 return;
5381 }
5382
5383 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
5384 ? LocationSummary::kCallOnSlowPath
5385 : LocationSummary::kNoCall;
5386 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
5387 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5388 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5389 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5390 locations->SetInAt(0, Location::RequiresRegister());
5391 }
5392 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005393}
5394
5395void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005396 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005397 if (cls->NeedsAccessCheck()) {
5398 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5399 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5400 cls,
5401 cls->GetDexPc(),
5402 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005403 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005404 return;
5405 }
5406
Roland Levillain0d5a2812015-11-13 10:07:31 +00005407 Location out_loc = locations->Out();
5408 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005409
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005410 bool generate_null_check = false;
5411 switch (cls->GetLoadKind()) {
5412 case HLoadClass::LoadKind::kReferrersClass: {
5413 DCHECK(!cls->CanCallRuntime());
5414 DCHECK(!cls->MustGenerateClinitCheck());
5415 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5416 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5417 GenerateGcRootFieldLoad(
5418 cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5419 break;
5420 }
5421 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5422 DCHECK(!kEmitCompilerReadBarrier);
5423 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5424 codegen_->RecordTypePatch(cls);
5425 break;
5426 case HLoadClass::LoadKind::kBootImageAddress: {
5427 DCHECK(!kEmitCompilerReadBarrier);
5428 DCHECK_NE(cls->GetAddress(), 0u);
5429 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
5430 __ movl(out, Immediate(address)); // Zero-extended.
5431 codegen_->RecordSimplePatch();
5432 break;
5433 }
5434 case HLoadClass::LoadKind::kDexCacheAddress: {
5435 DCHECK_NE(cls->GetAddress(), 0u);
5436 // /* GcRoot<mirror::Class> */ out = *address
5437 if (IsUint<32>(cls->GetAddress())) {
5438 Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
5439 GenerateGcRootFieldLoad(cls, out_loc, address);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005440 } else {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005441 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5442 __ movq(out, Immediate(cls->GetAddress()));
5443 GenerateGcRootFieldLoad(cls, out_loc, Address(out, 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005444 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005445 generate_null_check = !cls->IsInDexCache();
5446 break;
5447 }
5448 case HLoadClass::LoadKind::kDexCachePcRelative: {
5449 uint32_t offset = cls->GetDexCacheElementOffset();
5450 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
5451 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5452 /* no_rip */ false);
5453 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
5454 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label);
5455 generate_null_check = !cls->IsInDexCache();
5456 break;
5457 }
5458 case HLoadClass::LoadKind::kDexCacheViaMethod: {
5459 // /* GcRoot<mirror::Class>[] */ out =
5460 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5461 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5462 __ movq(out,
5463 Address(current_method,
5464 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
5465 // /* GcRoot<mirror::Class> */ out = out[type_index]
5466 GenerateGcRootFieldLoad(
5467 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
5468 generate_null_check = !cls->IsInDexCache();
5469 break;
5470 }
5471 default:
5472 LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
5473 UNREACHABLE();
5474 }
5475
5476 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5477 DCHECK(cls->CanCallRuntime());
5478 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5479 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5480 codegen_->AddSlowPath(slow_path);
5481 if (generate_null_check) {
5482 __ testl(out, out);
5483 __ j(kEqual, slow_path->GetEntryLabel());
5484 }
5485 if (cls->MustGenerateClinitCheck()) {
5486 GenerateClassInitializationCheck(slow_path, out);
5487 } else {
5488 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005489 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005490 }
5491}
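// Rough shapes of the sequences emitted above, one per load kind
// (illustrative only; patching, read barriers and reference poisoning elided):
//   kBootImageLinkTimePcRelative: leal out, [RIP + placeholder]   // patched to the type address
//   kBootImageAddress:            movl out, imm32                 // zero-extended image address
//   kDexCacheViaMethod:           two dependent loads off the current ArtMethod's
//                                 resolved-types array, then, unless the type is known
//                                 to be in the dex cache, testl/jz into the slow path.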
5492
5493void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5494 LocationSummary* locations =
5495 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5496 locations->SetInAt(0, Location::RequiresRegister());
5497 if (check->HasUses()) {
5498 locations->SetOut(Location::SameAsFirstInput());
5499 }
5500}
5501
5502void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005503 // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005504 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005505 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005506 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005507 GenerateClassInitializationCheck(slow_path,
5508 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005509}
5510
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005511HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5512 HLoadString::LoadKind desired_string_load_kind) {
5513 if (kEmitCompilerReadBarrier) {
5514 switch (desired_string_load_kind) {
5515 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5516 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5517 case HLoadString::LoadKind::kBootImageAddress:
5518 // TODO: Implement for read barrier.
5519 return HLoadString::LoadKind::kDexCacheViaMethod;
5520 default:
5521 break;
5522 }
5523 }
5524 switch (desired_string_load_kind) {
5525 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5526 DCHECK(!GetCompilerOptions().GetCompilePic());
5527 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5528 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5529 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5530 DCHECK(GetCompilerOptions().GetCompilePic());
5531 break;
5532 case HLoadString::LoadKind::kBootImageAddress:
5533 break;
5534 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005535 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005536 break;
5537 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005538 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005539 break;
5540 case HLoadString::LoadKind::kDexCacheViaMethod:
5541 break;
5542 }
5543 return desired_string_load_kind;
5544}
5545
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005546void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005547 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005548 ? LocationSummary::kCallOnSlowPath
5549 : LocationSummary::kNoCall;
5550 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005551 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5552 locations->SetInAt(0, Location::RequiresRegister());
5553 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005554 locations->SetOut(Location::RequiresRegister());
5555}
5556
5557void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005558 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005559 Location out_loc = locations->Out();
5560 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005561
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005562 switch (load->GetLoadKind()) {
5563 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
5564 DCHECK(!kEmitCompilerReadBarrier);
5565 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5566 codegen_->RecordStringPatch(load);
5567 return; // No dex cache slow path.
5568 }
5569 case HLoadString::LoadKind::kBootImageAddress: {
5570 DCHECK(!kEmitCompilerReadBarrier);
5571 DCHECK_NE(load->GetAddress(), 0u);
5572 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5573 __ movl(out, Immediate(address)); // Zero-extended.
5574 codegen_->RecordSimplePatch();
5575 return; // No dex cache slow path.
5576 }
5577 case HLoadString::LoadKind::kDexCacheAddress: {
5578 DCHECK_NE(load->GetAddress(), 0u);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005579 // /* GcRoot<mirror::String> */ out = *address
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005580 if (IsUint<32>(load->GetAddress())) {
5581 Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
5582 GenerateGcRootFieldLoad(load, out_loc, address);
5583 } else {
5584 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5585 __ movq(out, Immediate(load->GetAddress()));
5586 GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
5587 }
5588 break;
5589 }
5590 case HLoadString::LoadKind::kDexCachePcRelative: {
5591 uint32_t offset = load->GetDexCacheElementOffset();
5592 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
5593 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5594 /* no_rip */ false);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005595 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005596 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
5597 break;
5598 }
5599 case HLoadString::LoadKind::kDexCacheViaMethod: {
5600 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5601
5602 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5603 GenerateGcRootFieldLoad(
5604 load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5605 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5606 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
5607 // /* GcRoot<mirror::String> */ out = out[string_index]
5608 GenerateGcRootFieldLoad(
5609 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
5610 break;
5611 }
5612 default:
5613 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
5614 UNREACHABLE();
5615 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005616
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005617 if (!load->IsInDexCache()) {
5618 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5619 codegen_->AddSlowPath(slow_path);
5620 __ testl(out, out);
5621 __ j(kEqual, slow_path->GetEntryLabel());
5622 __ Bind(slow_path->GetExitLabel());
5623 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005624}
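// As with HLoadClass above, the boot-image kinds resolve to a patched
// RIP-relative lea or a 32-bit immediate, while the dex-cache kinds load a
// GC root; when the string is not known to be in the dex cache, the result
// is null-checked into LoadStringSlowPathX86_64, roughly:
//   testl out, out
//   jz    <LoadStringSlowPathX86_64 entry>
//  slow_path_exit: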
5625
David Brazdilcb1c0552015-08-04 16:22:25 +01005626static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005627 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5628 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005629}
5630
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005631void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5632 LocationSummary* locations =
5633 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5634 locations->SetOut(Location::RequiresRegister());
5635}
5636
5637void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005638 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5639}
5640
5641void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5642 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5643}
5644
5645void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5646 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005647}
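// Illustrative only: both accesses above are GS-relative, since the GS
// segment register points at the current Thread on x86-64:
//   gs: movl out, [Thread::ExceptionOffset<kX86_64WordSize>()]   // VisitLoadException
//   gs: movl [Thread::ExceptionOffset<kX86_64WordSize>()], 0     // VisitClearException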
5648
5649void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5650 LocationSummary* locations =
5651 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5652 InvokeRuntimeCallingConvention calling_convention;
5653 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5654}
5655
5656void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005657 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5658 instruction,
5659 instruction->GetDexPc(),
5660 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005661 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005662}
5663
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005664static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5665 return kEmitCompilerReadBarrier &&
5666 (kUseBakerReadBarrier ||
5667 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5668 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5669 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5670}
5671
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005672void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005673 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005674 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5675 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005676 case TypeCheckKind::kExactCheck:
5677 case TypeCheckKind::kAbstractClassCheck:
5678 case TypeCheckKind::kClassHierarchyCheck:
5679 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005680 call_kind =
5681 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005682 break;
5683 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005684 case TypeCheckKind::kUnresolvedCheck:
5685 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005686 call_kind = LocationSummary::kCallOnSlowPath;
5687 break;
5688 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005689
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005690 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005691 locations->SetInAt(0, Location::RequiresRegister());
5692 locations->SetInAt(1, Location::Any());
5693 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5694 locations->SetOut(Location::RequiresRegister());
5695 // When read barriers are enabled, we need a temporary register for
5696 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005697 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005698 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005699 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005700}
5701
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005702void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005703 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005704 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005705 Location obj_loc = locations->InAt(0);
5706 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005707 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005708 Location out_loc = locations->Out();
5709 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005710 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005711 locations->GetTemp(0) :
5712 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005713 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005714 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5715 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5716 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005717 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005718 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005719
5720 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005721 // Avoid null check if we know obj is not null.
5722 if (instruction->MustDoNullCheck()) {
5723 __ testl(obj, obj);
5724 __ j(kEqual, &zero);
5725 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005726
Roland Levillain0d5a2812015-11-13 10:07:31 +00005727 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005728 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005729
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005730 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005731 case TypeCheckKind::kExactCheck: {
5732 if (cls.IsRegister()) {
5733 __ cmpl(out, cls.AsRegister<CpuRegister>());
5734 } else {
5735 DCHECK(cls.IsStackSlot()) << cls;
5736 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5737 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005738 if (zero.IsLinked()) {
5739 // Classes must be equal for the instanceof to succeed.
5740 __ j(kNotEqual, &zero);
5741 __ movl(out, Immediate(1));
5742 __ jmp(&done);
5743 } else {
5744 __ setcc(kEqual, out);
5745 // setcc only sets the low byte.
5746 __ andl(out, Immediate(1));
5747 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005748 break;
5749 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005750
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005751 case TypeCheckKind::kAbstractClassCheck: {
5752 // If the class is abstract, we eagerly fetch the super class of the
5753 // object to avoid doing a comparison we know will fail.
5754 NearLabel loop, success;
5755 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005756 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005757 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005758 __ testl(out, out);
5759 // If `out` is null, we use it for the result, and jump to `done`.
5760 __ j(kEqual, &done);
5761 if (cls.IsRegister()) {
5762 __ cmpl(out, cls.AsRegister<CpuRegister>());
5763 } else {
5764 DCHECK(cls.IsStackSlot()) << cls;
5765 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5766 }
5767 __ j(kNotEqual, &loop);
5768 __ movl(out, Immediate(1));
5769 if (zero.IsLinked()) {
5770 __ jmp(&done);
5771 }
5772 break;
5773 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005774
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005775 case TypeCheckKind::kClassHierarchyCheck: {
5776 // Walk over the class hierarchy to find a match.
5777 NearLabel loop, success;
5778 __ Bind(&loop);
5779 if (cls.IsRegister()) {
5780 __ cmpl(out, cls.AsRegister<CpuRegister>());
5781 } else {
5782 DCHECK(cls.IsStackSlot()) << cls;
5783 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5784 }
5785 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005786 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005787 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005788 __ testl(out, out);
5789 __ j(kNotEqual, &loop);
5790 // If `out` is null, we use it for the result, and jump to `done`.
5791 __ jmp(&done);
5792 __ Bind(&success);
5793 __ movl(out, Immediate(1));
5794 if (zero.IsLinked()) {
5795 __ jmp(&done);
5796 }
5797 break;
5798 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005799
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005800 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005801 // Do an exact check.
5802 NearLabel exact_check;
5803 if (cls.IsRegister()) {
5804 __ cmpl(out, cls.AsRegister<CpuRegister>());
5805 } else {
5806 DCHECK(cls.IsStackSlot()) << cls;
5807 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5808 }
5809 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005810 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005811 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005812 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005813 __ testl(out, out);
5814 // If `out` is null, we use it for the result, and jump to `done`.
5815 __ j(kEqual, &done);
5816 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5817 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005818 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005819 __ movl(out, Immediate(1));
5820 __ jmp(&done);
5821 break;
5822 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005823
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005824 case TypeCheckKind::kArrayCheck: {
5825 if (cls.IsRegister()) {
5826 __ cmpl(out, cls.AsRegister<CpuRegister>());
5827 } else {
5828 DCHECK(cls.IsStackSlot()) << cls;
5829 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5830 }
5831 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005832 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5833 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005834 codegen_->AddSlowPath(slow_path);
5835 __ j(kNotEqual, slow_path->GetEntryLabel());
5836 __ movl(out, Immediate(1));
5837 if (zero.IsLinked()) {
5838 __ jmp(&done);
5839 }
5840 break;
5841 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005842
Calin Juravle98893e12015-10-02 21:05:03 +01005843 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005844 case TypeCheckKind::kInterfaceCheck: {
5845 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005846 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005847 // cases.
5848 //
5849 // We cannot directly call the InstanceofNonTrivial runtime
5850 // entry point without resorting to a type checking slow path
5851 // here (i.e. by calling InvokeRuntime directly), as it would
5852 // require assigning fixed registers for the inputs of this
5853 // HInstanceOf instruction (following the runtime calling
5854 // convention), which might be cluttered by the potential first
5855 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005856 //
5857 // TODO: Introduce a new runtime entry point taking the object
5858 // to test (instead of its class) as argument, and let it deal
5859 // with the read barrier issues. This will let us refactor this
5860 // case of the `switch` code as it was previously (with a direct
5861 // call to the runtime not using a type checking slow path).
5862 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005863 DCHECK(locations->OnlyCallsOnSlowPath());
5864 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5865 /* is_fatal */ false);
5866 codegen_->AddSlowPath(slow_path);
5867 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005868 if (zero.IsLinked()) {
5869 __ jmp(&done);
5870 }
5871 break;
5872 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005873 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005874
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005875 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005876 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005877 __ xorl(out, out);
5878 }
5879
5880 if (done.IsLinked()) {
5881 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005882 }
5883
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005884 if (slow_path != nullptr) {
5885 __ Bind(slow_path->GetExitLabel());
5886 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005887}
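// Illustrative fast path for the kExactCheck case above, when `obj` is known
// to be non-null and read barriers are disabled:
//   out = obj->klass_                  (movl + optional unpoison)
//   out = (out == cls) ? 1 : 0         (cmpl; setcc kEqual; andl out, 1,
//                                       since setcc writes only the low byte)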
5888
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005889void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005890 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5891 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005892 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5893 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005894 case TypeCheckKind::kExactCheck:
5895 case TypeCheckKind::kAbstractClassCheck:
5896 case TypeCheckKind::kClassHierarchyCheck:
5897 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005898 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5899 LocationSummary::kCallOnSlowPath :
5900 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005901 break;
5902 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005903 case TypeCheckKind::kUnresolvedCheck:
5904 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005905 call_kind = LocationSummary::kCallOnSlowPath;
5906 break;
5907 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005908 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5909 locations->SetInAt(0, Location::RequiresRegister());
5910 locations->SetInAt(1, Location::Any());
5911 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5912 locations->AddTemp(Location::RequiresRegister());
5913 // When read barriers are enabled, we need an additional temporary
5914 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005915 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005916 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005917 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005918}
5919
5920void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005921 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005922 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005923 Location obj_loc = locations->InAt(0);
5924 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005925 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005926 Location temp_loc = locations->GetTemp(0);
5927 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005928 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005929 locations->GetTemp(1) :
5930 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005931 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5932 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5933 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5934 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005935
Roland Levillain0d5a2812015-11-13 10:07:31 +00005936 bool is_type_check_slow_path_fatal =
5937 (type_check_kind == TypeCheckKind::kExactCheck ||
5938 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5939 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5940 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5941 !instruction->CanThrowIntoCatchBlock();
5942 SlowPathCode* type_check_slow_path =
5943 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5944 is_type_check_slow_path_fatal);
5945 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005946
Roland Levillain0d5a2812015-11-13 10:07:31 +00005947 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005948 case TypeCheckKind::kExactCheck:
5949 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005950 NearLabel done;
5951 // Avoid null check if we know obj is not null.
5952 if (instruction->MustDoNullCheck()) {
5953 __ testl(obj, obj);
5954 __ j(kEqual, &done);
5955 }
5956
5957 // /* HeapReference<Class> */ temp = obj->klass_
5958 GenerateReferenceLoadTwoRegisters(
5959 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5960
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005961 if (cls.IsRegister()) {
5962 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5963 } else {
5964 DCHECK(cls.IsStackSlot()) << cls;
5965 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5966 }
5967 // Jump to slow path for throwing the exception or doing a
5968 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005969 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005970 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005971 break;
5972 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005973
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005974 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005975 NearLabel done;
5976 // Avoid null check if we know obj is not null.
5977 if (instruction->MustDoNullCheck()) {
5978 __ testl(obj, obj);
5979 __ j(kEqual, &done);
5980 }
5981
5982 // /* HeapReference<Class> */ temp = obj->klass_
5983 GenerateReferenceLoadTwoRegisters(
5984 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5985
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005986 // If the class is abstract, we eagerly fetch the super class of the
5987 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005988 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005989 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005990 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005991 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005992
5993 // If the class reference currently in `temp` is not null, jump
5994 // to the `compare_classes` label to compare it with the checked
5995 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005996 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005997 __ j(kNotEqual, &compare_classes);
5998 // Otherwise, jump to the slow path to throw the exception.
5999 //
6000 // But before, move back the object's class into `temp` before
6001 // going into the slow path, as it has been overwritten in the
6002 // meantime.
6003 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006004 GenerateReferenceLoadTwoRegisters(
6005 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006006 __ jmp(type_check_slow_path->GetEntryLabel());
6007
6008 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006009 if (cls.IsRegister()) {
6010 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6011 } else {
6012 DCHECK(cls.IsStackSlot()) << cls;
6013 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6014 }
6015 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00006016 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006017 break;
6018 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006019
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006020 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006021 NearLabel done;
6022 // Avoid null check if we know obj is not null.
6023 if (instruction->MustDoNullCheck()) {
6024 __ testl(obj, obj);
6025 __ j(kEqual, &done);
6026 }
6027
6028 // /* HeapReference<Class> */ temp = obj->klass_
6029 GenerateReferenceLoadTwoRegisters(
6030 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6031
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006032 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006033 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006034 __ Bind(&loop);
6035 if (cls.IsRegister()) {
6036 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6037 } else {
6038 DCHECK(cls.IsStackSlot()) << cls;
6039 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6040 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006041 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006042
Roland Levillain0d5a2812015-11-13 10:07:31 +00006043 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006044 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006045
6046 // If the class reference currently in `temp` is not null, jump
6047 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006048 __ testl(temp, temp);
6049 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006050 // Otherwise, jump to the slow path to throw the exception.
6051 //
6052 // But before, move back the object's class into `temp` before
6053 // going into the slow path, as it has been overwritten in the
6054 // meantime.
6055 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006056 GenerateReferenceLoadTwoRegisters(
6057 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006058 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006059 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006060 break;
6061 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006062
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006063 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006064 // We cannot use a NearLabel here, as its range might be too
6065 // short in some cases when read barriers are enabled. This has
6066 // been observed for instance when the code emitted for this
6067 // case uses high x86-64 registers (R8-R15).
6068 Label done;
6069 // Avoid null check if we know obj is not null.
6070 if (instruction->MustDoNullCheck()) {
6071 __ testl(obj, obj);
6072 __ j(kEqual, &done);
6073 }
6074
6075 // /* HeapReference<Class> */ temp = obj->klass_
6076 GenerateReferenceLoadTwoRegisters(
6077 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6078
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006079 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006080 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006081 if (cls.IsRegister()) {
6082 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6083 } else {
6084 DCHECK(cls.IsStackSlot()) << cls;
6085 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6086 }
6087 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006088
6089 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006090 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006091 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006092
6093 // If the component type is not null (i.e. the object is indeed
6094 // an array), jump to label `check_non_primitive_component_type`
6095 // to further check that this component type is not a primitive
6096 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006097 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006098 __ j(kNotEqual, &check_non_primitive_component_type);
6099 // Otherwise, jump to the slow path to throw the exception.
6100 //
6101 // But before, move back the object's class into `temp` before
6102 // going into the slow path, as it has been overwritten in the
6103 // meantime.
6104 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006105 GenerateReferenceLoadTwoRegisters(
6106 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006107 __ jmp(type_check_slow_path->GetEntryLabel());
6108
6109 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006110 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00006111 __ j(kEqual, &done);
6112 // Same comment as above regarding `temp` and the slow path.
6113 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006114 GenerateReferenceLoadTwoRegisters(
6115 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006116 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006117 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006118 break;
6119 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006120
Calin Juravle98893e12015-10-02 21:05:03 +01006121 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006122 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00006123 NearLabel done;
6124 // Avoid null check if we know obj is not null.
6125 if (instruction->MustDoNullCheck()) {
6126 __ testl(obj, obj);
6127 __ j(kEqual, &done);
6128 }
6129
6130 // /* HeapReference<Class> */ temp = obj->klass_
6131 GenerateReferenceLoadTwoRegisters(
6132 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6133
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006134 // We always go into the type check slow path for the unresolved
6135 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006136 //
6137 // We cannot directly call the CheckCast runtime entry point
6138 // without resorting to a type checking slow path here (i.e. by
6139 // calling InvokeRuntime directly), as it would require assigning
6140 // fixed registers for the inputs of this HCheckCast
6141 // instruction (following the runtime calling convention), which
6142 // might be cluttered by the potential first read barrier
6143 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006144 //
6145 // TODO: Introduce a new runtime entry point taking the object
6146 // to test (instead of its class) as argument, and let it deal
6147 // with the read barrier issues. This will let us refactor this
6148 // case of the `switch` code as it was previously (with a direct
6149 // call to the runtime not using a type checking slow path).
6150 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006151 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006152 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006153 break;
6154 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006155
Roland Levillain0d5a2812015-11-13 10:07:31 +00006156 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006157}
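// Illustrative sketch of the kExactCheck / kArrayCheck fast path above
// (read barriers and reference poisoning elided):
//   testl obj, obj ; jz done               // only if a null check is required
//   movl  temp, [obj + class_offset]       // temp = obj->klass_
//   cmpl  temp, cls
//   jne   <TypeCheckSlowPathX86_64 entry>  // throws, or performs the deeper array check
//  done: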
6158
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006159void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6160 LocationSummary* locations =
6161 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
6162 InvokeRuntimeCallingConvention calling_convention;
6163 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6164}
6165
6166void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01006167 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
6168 : QUICK_ENTRY_POINT(pUnlockObject),
6169 instruction,
6170 instruction->GetDexPc(),
6171 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006172 if (instruction->IsEnter()) {
6173 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6174 } else {
6175 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6176 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006177}
6178
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006179void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6180void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6181void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6182
6183void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6184 LocationSummary* locations =
6185 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6186 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6187 || instruction->GetResultType() == Primitive::kPrimLong);
6188 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006189 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006190 locations->SetOut(Location::SameAsFirstInput());
6191}
6192
6193void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6194 HandleBitwiseOperation(instruction);
6195}
6196
6197void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6198 HandleBitwiseOperation(instruction);
6199}
6200
6201void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6202 HandleBitwiseOperation(instruction);
6203}
6204
6205void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6206 LocationSummary* locations = instruction->GetLocations();
6207 Location first = locations->InAt(0);
6208 Location second = locations->InAt(1);
6209 DCHECK(first.Equals(locations->Out()));
6210
6211 if (instruction->GetResultType() == Primitive::kPrimInt) {
6212 if (second.IsRegister()) {
6213 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006214 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006215 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006216 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006217 } else {
6218 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006219 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006220 }
6221 } else if (second.IsConstant()) {
6222 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6223 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006224 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006225 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006226 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006227 } else {
6228 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006229 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006230 }
6231 } else {
6232 Address address(CpuRegister(RSP), second.GetStackIndex());
6233 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006234 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006235 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006236 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006237 } else {
6238 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006239 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006240 }
6241 }
6242 } else {
6243 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006244 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6245 bool second_is_constant = false;
6246 int64_t value = 0;
6247 if (second.IsConstant()) {
6248 second_is_constant = true;
6249 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006250 }
Mark Mendell40741f32015-04-20 22:10:34 -04006251 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006252
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006253 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006254 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006255 if (is_int32_value) {
6256 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6257 } else {
6258 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6259 }
6260 } else if (second.IsDoubleStackSlot()) {
6261 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006262 } else {
6263 __ andq(first_reg, second.AsRegister<CpuRegister>());
6264 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006265 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006266 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006267 if (is_int32_value) {
6268 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6269 } else {
6270 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6271 }
6272 } else if (second.IsDoubleStackSlot()) {
6273 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006274 } else {
6275 __ orq(first_reg, second.AsRegister<CpuRegister>());
6276 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006277 } else {
6278 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006279 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006280 if (is_int32_value) {
6281 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6282 } else {
6283 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6284 }
6285 } else if (second.IsDoubleStackSlot()) {
6286 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006287 } else {
6288 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6289 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006290 }
6291 }
6292}
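// Illustrative examples of the 64-bit constant handling above:
//   x & 0xFFL       -> andq reg, $0xFF               // value fits in a sign-extended imm32
//   x & 0xFFFFFFFFL -> andq reg, [RIP + literal]     // does not fit in an int32, so a
//                                                    // constant area literal is used instead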
6293
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006294void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6295 Location out,
6296 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006297 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006298 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6299 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006300 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006301 if (kUseBakerReadBarrier) {
6302 // Load with fast path based Baker's read barrier.
6303 // /* HeapReference<Object> */ out = *(out + offset)
6304 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006305 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006306 } else {
6307 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006308 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006309 // in the following move operation, as we will need it for the
6310 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006311 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006312 // /* HeapReference<Object> */ out = *(out + offset)
6313 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006314 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006315 }
6316 } else {
6317 // Plain load with no read barrier.
6318 // /* HeapReference<Object> */ out = *(out + offset)
6319 __ movl(out_reg, Address(out_reg, offset));
6320 __ MaybeUnpoisonHeapReference(out_reg);
6321 }
6322}
6323
6324void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6325 Location out,
6326 Location obj,
6327 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006328 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006329 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6330 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6331 if (kEmitCompilerReadBarrier) {
6332 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006333 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006334 // Load with fast path based Baker's read barrier.
6335 // /* HeapReference<Object> */ out = *(obj + offset)
6336 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006337 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006338 } else {
6339 // Load with slow path based read barrier.
6340 // /* HeapReference<Object> */ out = *(obj + offset)
6341 __ movl(out_reg, Address(obj_reg, offset));
6342 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6343 }
6344 } else {
6345 // Plain load with no read barrier.
6346 // /* HeapReference<Object> */ out = *(obj + offset)
6347 __ movl(out_reg, Address(obj_reg, offset));
6348 __ MaybeUnpoisonHeapReference(out_reg);
6349 }
6350}
6351
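// Loads a GC root from `address` into `root`, binding `fixup_label` (when non-null)
// immediately after the instruction that needs patching, and emitting the read barrier
// handling required for roots.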
6352void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6353 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006354 const Address& address,
6355 Label* fixup_label) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006356 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6357 if (kEmitCompilerReadBarrier) {
6358 if (kUseBakerReadBarrier) {
6359 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6360 // Baker's read barrier are used:
6361 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006362 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006363 // if (Thread::Current()->GetIsGcMarking()) {
6364 // root = ReadBarrier::Mark(root)
6365 // }
6366
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006367 // /* GcRoot<mirror::Object> */ root = *address
6368 __ movl(root_reg, address);
6369 if (fixup_label != nullptr) {
6370 __ Bind(fixup_label);
6371 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006372 static_assert(
6373 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6374 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6375 "have different sizes.");
6376 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6377 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6378 "have different sizes.");
6379
6380 // Slow path used to mark the GC root `root`.
6381 SlowPathCode* slow_path =
6382 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
6383 codegen_->AddSlowPath(slow_path);
6384
6385 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6386 /* no_rip */ true),
6387 Immediate(0));
6388 __ j(kNotEqual, slow_path->GetEntryLabel());
6389 __ Bind(slow_path->GetExitLabel());
6390 } else {
6391 // GC root loaded through a slow path for read barriers other
6392 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006393 // /* GcRoot<mirror::Object>* */ root = address
6394 __ leaq(root_reg, address);
6395 if (fixup_label != nullptr) {
6396 __ Bind(fixup_label);
6397 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006398 // /* mirror::Object* */ root = root->Read()
6399 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6400 }
6401 } else {
6402 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006403 // /* GcRoot<mirror::Object> */ root = *address
6404 __ movl(root_reg, address);
6405 if (fixup_label != nullptr) {
6406 __ Bind(fixup_label);
6407 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006408 // Note that GC roots are not affected by heap poisoning, thus we
6409 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006410 }
6411}
6412
6413void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6414 Location ref,
6415 CpuRegister obj,
6416 uint32_t offset,
6417 Location temp,
6418 bool needs_null_check) {
6419 DCHECK(kEmitCompilerReadBarrier);
6420 DCHECK(kUseBakerReadBarrier);
6421
6422 // /* HeapReference<Object> */ ref = *(obj + offset)
6423 Address src(obj, offset);
6424 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6425}
6426
6427void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6428 Location ref,
6429 CpuRegister obj,
6430 uint32_t data_offset,
6431 Location index,
6432 Location temp,
6433 bool needs_null_check) {
6434 DCHECK(kEmitCompilerReadBarrier);
6435 DCHECK(kUseBakerReadBarrier);
6436
6437 // /* HeapReference<Object> */ ref =
6438 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
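  // Note: the TIMES_4 scale below assumes sizeof(HeapReference<Object>) == 4 (heap
  // references are stored as compressed 32-bit values), which also matches the movl
  // used for the actual reference load.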
6439 Address src = index.IsConstant() ?
6440 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6441 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6442 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6443}
6444
6445void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6446 Location ref,
6447 CpuRegister obj,
6448 const Address& src,
6449 Location temp,
6450 bool needs_null_check) {
6451 DCHECK(kEmitCompilerReadBarrier);
6452 DCHECK(kUseBakerReadBarrier);
6453
6454 // In slow path based read barriers, the read barrier call is
6455 // inserted after the original load. However, in fast path based
6456 // Baker's read barriers, we need to perform the load of
6457 // mirror::Object::monitor_ *before* the original reference load.
6458 // This load-load ordering is required by the read barrier.
6459 // The fast path/slow path (for Baker's algorithm) should look like:
6460 //
 6461 // uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
6462 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6463 // HeapReference<Object> ref = *src; // Original reference load.
6464 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6465 // if (is_gray) {
6466 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6467 // }
6468 //
6469 // Note: the original implementation in ReadBarrier::Barrier is
6470 // slightly more complex as:
6471 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006472 // the high-bits of rb_state, which are expected to be all zeroes
6473 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6474 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006475 // - it performs additional checks that we do not do here for
6476 // performance reasons.
6477
6478 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6479 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6480 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6481
6482 // /* int32_t */ monitor = obj->monitor_
6483 __ movl(temp_reg, Address(obj, monitor_offset));
6484 if (needs_null_check) {
6485 MaybeRecordImplicitNullCheck(instruction);
6486 }
6487 // /* LockWord */ lock_word = LockWord(monitor)
6488 static_assert(sizeof(LockWord) == sizeof(int32_t),
6489 "art::LockWord and int32_t have different sizes.");
6490 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6491 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6492 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6493 static_assert(
6494 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6495 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6496
6497 // Load fence to prevent load-load reordering.
6498 // Note that this is a no-op, thanks to the x86-64 memory model.
6499 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6500
6501 // The actual reference load.
6502 // /* HeapReference<Object> */ ref = *src
6503 __ movl(ref_reg, src);
6504
6505 // Object* ref = ref_addr->AsMirrorPtr()
6506 __ MaybeUnpoisonHeapReference(ref_reg);
6507
6508 // Slow path used to mark the object `ref` when it is gray.
6509 SlowPathCode* slow_path =
6510 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6511 AddSlowPath(slow_path);
6512
6513 // if (rb_state == ReadBarrier::gray_ptr_)
6514 // ref = ReadBarrier::Mark(ref);
6515 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6516 __ j(kEqual, slow_path->GetEntryLabel());
6517 __ Bind(slow_path->GetExitLabel());
6518}
6519
6520void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6521 Location out,
6522 Location ref,
6523 Location obj,
6524 uint32_t offset,
6525 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006526 DCHECK(kEmitCompilerReadBarrier);
6527
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006528 // Insert a slow path based read barrier *after* the reference load.
6529 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006530 // If heap poisoning is enabled, the unpoisoning of the loaded
6531 // reference will be carried out by the runtime within the slow
6532 // path.
6533 //
6534 // Note that `ref` currently does not get unpoisoned (when heap
6535 // poisoning is enabled), which is alright as the `ref` argument is
6536 // not used by the artReadBarrierSlow entry point.
6537 //
6538 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6539 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6540 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6541 AddSlowPath(slow_path);
6542
Roland Levillain0d5a2812015-11-13 10:07:31 +00006543 __ jmp(slow_path->GetEntryLabel());
6544 __ Bind(slow_path->GetExitLabel());
6545}
6546
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006547void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6548 Location out,
6549 Location ref,
6550 Location obj,
6551 uint32_t offset,
6552 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006553 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006554 // Baker's read barriers shall be handled by the fast path
6555 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6556 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006557 // If heap poisoning is enabled, unpoisoning will be taken care of
6558 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006559 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006560 } else if (kPoisonHeapReferences) {
6561 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6562 }
6563}
6564
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006565void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6566 Location out,
6567 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006568 DCHECK(kEmitCompilerReadBarrier);
6569
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006570 // Insert a slow path based read barrier *after* the GC root load.
6571 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006572 // Note that GC roots are not affected by heap poisoning, so we do
6573 // not need to do anything special for this here.
6574 SlowPathCode* slow_path =
6575 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6576 AddSlowPath(slow_path);
6577
Roland Levillain0d5a2812015-11-13 10:07:31 +00006578 __ jmp(slow_path->GetEntryLabel());
6579 __ Bind(slow_path->GetExitLabel());
6580}
6581
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006582void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006583 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006584 LOG(FATAL) << "Unreachable";
6585}
6586
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006587void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006588 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006589 LOG(FATAL) << "Unreachable";
6590}
6591
Mark Mendellfe57faa2015-09-18 09:26:15 -04006592// Packed switch: small switches generate cascaded compare/jumps; larger ones use a jump table in the constant area.
6593void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6594 LocationSummary* locations =
6595 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6596 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006597 locations->AddTemp(Location::RequiresRegister());
6598 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006599}
6600
6601void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6602 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006603 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006604 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006605 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6606 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6607 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006608 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6609
6610 // Should we generate smaller inline compare/jumps?
6611 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6612 // Figure out the correct compare values and jump conditions.
6613 // Handle the first compare/branch as a special case because it might
6614 // jump to the default case.
6615 DCHECK_GT(num_entries, 2u);
6616 Condition first_condition;
6617 uint32_t index;
6618 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6619 if (lower_bound != 0) {
6620 first_condition = kLess;
6621 __ cmpl(value_reg_in, Immediate(lower_bound));
6622 __ j(first_condition, codegen_->GetLabelOf(default_block));
6623 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6624
6625 index = 1;
6626 } else {
6627 // Handle all the compare/jumps below.
6628 first_condition = kBelow;
6629 index = 0;
6630 }
6631
6632 // Handle the rest of the compare/jumps.
6633 for (; index + 1 < num_entries; index += 2) {
6634 int32_t compare_to_value = lower_bound + index + 1;
6635 __ cmpl(value_reg_in, Immediate(compare_to_value));
6636 // Jump to successors[index] if value < case_value[index].
6637 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6638 // Jump to successors[index + 1] if value == case_value[index + 1].
6639 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6640 }
6641
6642 if (index != num_entries) {
6643 // There are an odd number of entries. Handle the last one.
6644 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00006645 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006646 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6647 }
6648
6649 // And the default for any other value.
6650 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6651 __ jmp(codegen_->GetLabelOf(default_block));
6652 }
6653 return;
6654 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006655
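  // The jump table path below emits, roughly (registers are placeholders for whatever
  // the register allocator and the temp locations provide):
  //   leal   -lower_bound(value_in), temp      // only if lower_bound != 0
  //   cmpl   $(num_entries - 1), value
  //   ja     default_block
  //   leaq   jump_table(%rip), base            // table lives in the constant area
  //   movsxd (base, value, 4), temp            // signed 32-bit offset of the target
  //   addq   base, temp
  //   jmp    *temp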
6656 // Remove the bias, if needed.
6657 Register value_reg_out = value_reg_in.AsRegister();
6658 if (lower_bound != 0) {
6659 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6660 value_reg_out = temp_reg.AsRegister();
6661 }
6662 CpuRegister value_reg(value_reg_out);
6663
6664 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006665 __ cmpl(value_reg, Immediate(num_entries - 1));
6666 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006667
Mark Mendell9c86b482015-09-18 13:36:07 -04006668 // We are in the range of the table.
6669 // Load the address of the jump table in the constant area.
6670 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006671
Mark Mendell9c86b482015-09-18 13:36:07 -04006672 // Load the (signed) offset from the jump table.
6673 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6674
6675 // Add the offset to the address of the table base.
6676 __ addq(temp_reg, base_reg);
6677
6678 // And jump.
6679 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006680}
6681
Aart Bikc5d47542016-01-27 17:00:35 -08006682void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6683 if (value == 0) {
6684 __ xorl(dest, dest);
6685 } else {
6686 __ movl(dest, Immediate(value));
6687 }
6688}
6689
Mark Mendell92e83bf2015-05-07 11:25:03 -04006690void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6691 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006692 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006693 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006694 } else if (IsUint<32>(value)) {
 6695 // We can use a 32-bit move, as it zero-extends to 64 bits and has a shorter encoding.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006696 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6697 } else {
6698 __ movq(dest, Immediate(value));
6699 }
6700}
6701
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006702void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6703 if (value == 0) {
6704 __ xorps(dest, dest);
6705 } else {
6706 __ movss(dest, LiteralInt32Address(value));
6707 }
6708}
6709
6710void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6711 if (value == 0) {
6712 __ xorpd(dest, dest);
6713 } else {
6714 __ movsd(dest, LiteralInt64Address(value));
6715 }
6716}
6717
6718void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6719 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6720}
6721
6722void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6723 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6724}
6725
Aart Bika19616e2016-02-01 18:57:58 -08006726void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6727 if (value == 0) {
6728 __ testl(dest, dest);
6729 } else {
6730 __ cmpl(dest, Immediate(value));
6731 }
6732}
6733
6734void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
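  // cmpq only accepts a sign-extended 32-bit immediate, so values outside that range
  // are compared against a RIP-relative 64-bit literal instead.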
6735 if (IsInt<32>(value)) {
6736 if (value == 0) {
6737 __ testq(dest, dest);
6738 } else {
6739 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6740 }
6741 } else {
6742 // Value won't fit in an int.
 6743 // Value won't fit in a sign-extended 32-bit immediate.
6744 }
6745}
6746
Mark Mendellcfa410b2015-05-25 16:02:44 -04006747void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6748 DCHECK(dest.IsDoubleStackSlot());
6749 if (IsInt<32>(value)) {
6750 // Can move directly as an int32 constant.
6751 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6752 Immediate(static_cast<int32_t>(value)));
6753 } else {
6754 Load64BitValue(CpuRegister(TMP), value);
6755 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6756 }
6757}
6758
Mark Mendell9c86b482015-09-18 13:36:07 -04006759/**
 6760 * Class to handle late fixup of offsets into the constant area.
6761 */
6762class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6763 public:
6764 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6765 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6766
6767 protected:
6768 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6769
6770 CodeGeneratorX86_64* codegen_;
6771
6772 private:
6773 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6774 // Patch the correct offset for the instruction. We use the address of the
6775 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
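    // For example, with a RIP-relative operand such as `movl literal(%rip), %eax`, the
    // 32-bit displacement is the last field of the instruction (assuming no trailing
    // immediate operand), so it occupies the 4 bytes ending at `pos`.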
6776 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6777 int32_t relative_position = constant_offset - pos;
6778
6779 // Patch in the right value.
6780 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6781 }
6782
6783 // Location in constant area that the fixup refers to.
6784 size_t offset_into_constant_area_;
6785};
6786
6787/**
 6788 * Class to handle late fixup of offsets to a jump table that will be created in the
6789 * constant area.
6790 */
6791class JumpTableRIPFixup : public RIPFixup {
6792 public:
6793 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6794 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6795
6796 void CreateJumpTable() {
6797 X86_64Assembler* assembler = codegen_->GetAssembler();
6798
6799 // Ensure that the reference to the jump table has the correct offset.
6800 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6801 SetOffset(offset_in_constant_table);
6802
6803 // Compute the offset from the start of the function to this jump table.
6804 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6805
 6806 // Populate the jump table with the offset of each successor block.
6807 int32_t num_entries = switch_instr_->GetNumEntries();
6808 HBasicBlock* block = switch_instr_->GetBlock();
6809 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
 6810 // The value that we want is the target offset minus the position of the table.
6811 for (int32_t i = 0; i < num_entries; i++) {
6812 HBasicBlock* b = successors[i];
6813 Label* l = codegen_->GetLabelOf(b);
6814 DCHECK(l->IsBound());
6815 int32_t offset_to_block = l->Position() - current_table_offset;
6816 assembler->AppendInt32(offset_to_block);
6817 }
6818 }
6819
6820 private:
6821 const HPackedSwitch* switch_instr_;
6822};
6823
Mark Mendellf55c3e02015-03-26 21:07:46 -04006824void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6825 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006826 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006827 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
 6828 // Align to a 4-byte boundary to reduce cache misses, as the data consists of 4- and 8-byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006829 assembler->Align(4, 0);
6830 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006831
6832 // Populate any jump tables.
6833 for (auto jump_table : fixups_to_jump_tables_) {
6834 jump_table->CreateJumpTable();
6835 }
6836
6837 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006838 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006839 }
6840
6841 // And finish up.
6842 CodeGenerator::Finalize(allocator);
6843}
6844
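// The Literal*Address helpers below return RIP-relative addresses whose 32-bit
// displacements are not yet known; each is backed by a RIPFixup that gets resolved
// once Finalize() has laid out the constant area.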
Mark Mendellf55c3e02015-03-26 21:07:46 -04006845Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6846 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6847 return Address::RIP(fixup);
6848}
6849
6850Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6851 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6852 return Address::RIP(fixup);
6853}
6854
6855Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6856 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6857 return Address::RIP(fixup);
6858}
6859
6860Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6861 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6862 return Address::RIP(fixup);
6863}
6864
Andreas Gampe85b62f22015-09-09 13:15:38 -07006865// TODO: trg as memory.
6866void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6867 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006868 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006869 return;
6870 }
6871
6872 DCHECK_NE(type, Primitive::kPrimVoid);
6873
6874 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6875 if (trg.Equals(return_loc)) {
6876 return;
6877 }
6878
6879 // Let the parallel move resolver take care of all of this.
6880 HParallelMove parallel_move(GetGraph()->GetArena());
6881 parallel_move.AddMove(return_loc, trg, type, nullptr);
6882 GetMoveResolver()->EmitNativeCode(&parallel_move);
6883}
6884
Mark Mendell9c86b482015-09-18 13:36:07 -04006885Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
 6886 // Create a fixup that will be used both to emit the jump table and to address it.
6887 JumpTableRIPFixup* table_fixup =
6888 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6889
 6890 // Remember the fixup so that Finalize() can populate the jump table.
6891 fixups_to_jump_tables_.push_back(table_fixup);
6892 return Address::RIP(table_fixup);
6893}
6894
Mark Mendellea5af682015-10-22 17:35:49 -04006895void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6896 const Address& addr_high,
6897 int64_t v,
6898 HInstruction* instruction) {
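  // x86-64 has no `movq m64, imm64` form: a quadword store only takes a sign-extended
  // 32-bit immediate, hence the split into two 32-bit stores for larger values.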
6899 if (IsInt<32>(v)) {
6900 int32_t v_32 = v;
6901 __ movq(addr_low, Immediate(v_32));
6902 MaybeRecordImplicitNullCheck(instruction);
6903 } else {
 6904 // Doesn't fit in a sign-extended 32-bit immediate. Store it in two 32-bit pieces.
6905 int32_t low_v = Low32Bits(v);
6906 int32_t high_v = High32Bits(v);
6907 __ movl(addr_low, Immediate(low_v));
6908 MaybeRecordImplicitNullCheck(instruction);
6909 __ movl(addr_high, Immediate(high_v));
6910 }
6911}
6912
Roland Levillain4d027112015-07-01 15:41:14 +01006913#undef __
6914
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006915} // namespace x86_64
6916} // namespace art