/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
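// The ArtMethod* of the callee is passed to managed code in RDI.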
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

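// 0x400 selects bit 10 of the x87 FPU status word, i.e. the C2 condition flag, which
// fprem leaves set while the partial remainder computation is still incomplete.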
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
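// QUICK_ENTRY_POINT(x) expands to the Thread-relative byte offset of the quick runtime
// entrypoint `x`; InvokeRuntime() below turns such an offset into a GS-relative call.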
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

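// Slow path for the corner case of dividing the most negative value by -1. On x86-64,
// idiv raises a divide error for INT_MIN / -1 (the quotient overflows), while the Java
// language defines INT_MIN / -1 == INT_MIN and INT_MIN % -1 == 0, so the result is
// produced here with a negation (div) or by clearing the register (rem).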
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
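        // Note: writing a 32-bit register zero-extends into the upper half on x86-64,
        // so xorl also yields a zero 64-bit value (and is shorter than xorq).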
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowArrayBounds),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
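// The object reference is passed in the first runtime argument register, the entrypoint
// returns the marked reference in RAX, and the result is then moved to `out`.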
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCode(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT

inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
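// Note: ucomiss/ucomisd set CF/ZF the way an unsigned integer compare would, so the
// unsigned condition codes (below/above) are used for floating-point comparisons.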
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movq" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

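// Virtual dispatch, in broad strokes: load the receiver's class, load the ArtMethod*
// from the class' embedded vtable at the invoke's vtable index, then call the method's
// quick-compiled entry point.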
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not keep it accessible in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

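// Note: movsd spills/reloads only the low 64 bits of an XMM register, which is enough
// as long as only float/double values (not vector data) live in XMM registers.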
size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
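  // ART keeps Thread::Current() in the GS segment register on x86-64, so a runtime
  // entrypoint is invoked with a GS-relative absolute call at its Thread offset.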
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
  RecordPcInfo(instruction, dex_pc, slow_path);
}

static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      constant_area_start_(0),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}

InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP.
  blocked_core_registers_[TMP] = true;
}

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}

static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}

void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001060 __ testq(CpuRegister(RAX), Address(
1061 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001062 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001063 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001064
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001065 if (HasEmptyFrame()) {
1066 return;
1067 }
1068
Nicolas Geoffray98893962015-01-21 12:32:32 +00001069 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001070 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001071 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001072 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001073 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1074 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001075 }
1076 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001077
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001078 int adjust = GetFrameSize() - GetCoreSpillSize();
1079 __ subq(CpuRegister(RSP), Immediate(adjust));
1080 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001081 uint32_t xmm_spill_location = GetFpuSpillStart();
1082 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001083
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001084 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1085 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001086 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1087 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1088 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001089 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001090 }
1091
Mathieu Chartiere401d142015-04-22 13:56:20 -07001092 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001093 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001094}
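// Sketch of the prologue emitted above for a non-leaf method that spills RBX
// and XMM12 (the exact offsets depend on the computed frame layout):
//
//   testq %rax, -reserved(%rsp)    // implicit stack-overflow probe
//   pushq %rbx                     // core callee-save spill
//   subq  $adjust, %rsp            // allocate the rest of the frame
//   movsd %xmm12, xmm_off(%rsp)    // FP callee-save spill
//   movq  %rdi, (%rsp)             // ArtMethod* at kCurrentMethodStackOffset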
1095
1096void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001097 __ cfi().RememberState();
1098 if (!HasEmptyFrame()) {
1099 uint32_t xmm_spill_location = GetFpuSpillStart();
1100 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1101 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1102 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1103 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1104 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1105 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1106 }
1107 }
1108
1109 int adjust = GetFrameSize() - GetCoreSpillSize();
1110 __ addq(CpuRegister(RSP), Immediate(adjust));
1111 __ cfi().AdjustCFAOffset(-adjust);
1112
1113 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1114 Register reg = kCoreCalleeSaves[i];
1115 if (allocated_registers_.ContainsCoreRegister(reg)) {
1116 __ popq(CpuRegister(reg));
1117 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1118 __ cfi().Restore(DWARFReg(reg));
1119 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001120 }
1121 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001122 __ ret();
1123 __ cfi().RestoreState();
1124 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001125}
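// The epilogue mirrors the prologue in reverse order: reload the spilled XMM
// registers, release the frame with addq, pop the core callee-saves and
// return. For a frame that spilled RBX and XMM12 this is roughly:
//
//   movsd xmm_off(%rsp), %xmm12
//   addq  $adjust, %rsp
//   popq  %rbx
//   ret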
1126
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001127void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1128 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001129}
1130
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001131void CodeGeneratorX86_64::Move(Location destination, Location source) {
1132 if (source.Equals(destination)) {
1133 return;
1134 }
1135 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001136 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001137 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001138 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001139 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001140 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001141 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001142 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1143 } else if (source.IsConstant()) {
1144 HConstant* constant = source.GetConstant();
1145 if (constant->IsLongConstant()) {
1146 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1147 } else {
1148 Load32BitValue(dest, GetInt32ValueOf(constant));
1149 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001150 } else {
1151 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001152 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001153 }
1154 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001155 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001156 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001157 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001158 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001159 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1160 } else if (source.IsConstant()) {
1161 HConstant* constant = source.GetConstant();
1162 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1163 if (constant->IsFloatConstant()) {
1164 Load32BitValue(dest, static_cast<int32_t>(value));
1165 } else {
1166 Load64BitValue(dest, value);
1167 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001168 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001169 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001170 } else {
1171 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001172 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001173 }
1174 } else if (destination.IsStackSlot()) {
1175 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001176 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001177 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001178 } else if (source.IsFpuRegister()) {
1179 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001180 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001181 } else if (source.IsConstant()) {
1182 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001183 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001184 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001185 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001186 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001187 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1188 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001189 }
1190 } else {
1191 DCHECK(destination.IsDoubleStackSlot());
1192 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001193 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001194 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001195 } else if (source.IsFpuRegister()) {
1196 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001197 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001198 } else if (source.IsConstant()) {
1199 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001200 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001201 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001202 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001203 } else {
1204 DCHECK(constant->IsLongConstant());
1205 value = constant->AsLongConstant()->GetValue();
1206 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001207 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001208 } else {
1209 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001210 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1211 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001212 }
1213 }
1214}
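// Summary of the cases handled by Move() above (a sketch, not exhaustive of
// every operand encoding):
//   GPR destination:   movq from a GPR, movd from an XMM, movl/movq from a
//                      stack slot, Load32/64BitValue for constants.
//   XMM destination:   movd from a GPR, movaps from an XMM, movss/movsd from
//                      a stack slot, Load32/64BitValue for FP constants.
//   Stack destination: movl/movq (or movss/movsd) from registers, immediates
//                      for 32-bit constants, Store64BitValueToStack for
//                      64-bit constants, and stack-to-stack copies staged
//                      through the reserved TMP register.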
1215
Calin Juravle175dc732015-08-25 15:42:32 +01001216void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1217 DCHECK(location.IsRegister());
1218 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1219}
1220
Calin Juravlee460d1d2015-09-29 04:52:17 +01001221void CodeGeneratorX86_64::MoveLocation(
1222 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1223 Move(dst, src);
1224}
1225
1226void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1227 if (location.IsRegister()) {
1228 locations->AddTemp(location);
1229 } else {
1230 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1231 }
1232}
1233
David Brazdilfc6a86a2015-06-26 10:33:45 +00001234void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001235 DCHECK(!successor->IsExitBlock());
1236
1237 HBasicBlock* block = got->GetBlock();
1238 HInstruction* previous = got->GetPrevious();
1239
1240 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001241 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001242 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1243 return;
1244 }
1245
1246 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1247 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1248 }
1249 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001250 __ jmp(codegen_->GetLabelOf(successor));
1251 }
1252}
1253
David Brazdilfc6a86a2015-06-26 10:33:45 +00001254void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1255 got->SetLocations(nullptr);
1256}
1257
1258void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1259 HandleGoto(got, got->GetSuccessor());
1260}
1261
1262void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1263 try_boundary->SetLocations(nullptr);
1264}
1265
1266void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1267 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1268 if (!successor->IsExitBlock()) {
1269 HandleGoto(try_boundary, successor);
1270 }
1271}
1272
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001273void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1274 exit->SetLocations(nullptr);
1275}
1276
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001277void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001278}
1279
Mark Mendell152408f2015-12-31 12:28:50 -05001280template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001281void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001282 LabelType* true_label,
1283 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001284 if (cond->IsFPConditionTrueIfNaN()) {
1285 __ j(kUnordered, true_label);
1286 } else if (cond->IsFPConditionFalseIfNaN()) {
1287 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001288 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001289 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001290}
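// NaN handling above: ucomiss/ucomisd (emitted by the callers) report an
// unordered result when either operand is NaN, so that outcome is dispatched
// first. Whether NaN counts as true or false comes from the condition's bias
// (IsFPConditionTrueIfNaN vs. IsFPConditionFalseIfNaN); only ordered results
// reach the X86_64FPCondition jump that follows.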
1291
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001292void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001293 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001294
Mark Mendellc4701932015-04-10 13:18:51 -04001295 Location left = locations->InAt(0);
1296 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001297 Primitive::Type type = condition->InputAt(0)->GetType();
1298 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001299 case Primitive::kPrimBoolean:
1300 case Primitive::kPrimByte:
1301 case Primitive::kPrimChar:
1302 case Primitive::kPrimShort:
1303 case Primitive::kPrimInt:
1304 case Primitive::kPrimNot: {
1305 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1306 if (right.IsConstant()) {
1307 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1308 if (value == 0) {
1309 __ testl(left_reg, left_reg);
1310 } else {
1311 __ cmpl(left_reg, Immediate(value));
1312 }
1313 } else if (right.IsStackSlot()) {
1314 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1315 } else {
1316 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1317 }
1318 break;
1319 }
Mark Mendellc4701932015-04-10 13:18:51 -04001320 case Primitive::kPrimLong: {
1321 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1322 if (right.IsConstant()) {
1323 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001324 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001325 } else if (right.IsDoubleStackSlot()) {
1326 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1327 } else {
1328 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1329 }
Mark Mendellc4701932015-04-10 13:18:51 -04001330 break;
1331 }
1332 case Primitive::kPrimFloat: {
1333 if (right.IsFpuRegister()) {
1334 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1335 } else if (right.IsConstant()) {
1336 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1337 codegen_->LiteralFloatAddress(
1338 right.GetConstant()->AsFloatConstant()->GetValue()));
1339 } else {
1340 DCHECK(right.IsStackSlot());
1341 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1342 Address(CpuRegister(RSP), right.GetStackIndex()));
1343 }
Mark Mendellc4701932015-04-10 13:18:51 -04001344 break;
1345 }
1346 case Primitive::kPrimDouble: {
1347 if (right.IsFpuRegister()) {
1348 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1349 } else if (right.IsConstant()) {
1350 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1351 codegen_->LiteralDoubleAddress(
1352 right.GetConstant()->AsDoubleConstant()->GetValue()));
1353 } else {
1354 DCHECK(right.IsDoubleStackSlot());
1355 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1356 Address(CpuRegister(RSP), right.GetStackIndex()));
1357 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001358 break;
1359 }
1360 default:
1361 LOG(FATAL) << "Unexpected condition type " << type;
1362 }
1363}
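// Example: an int condition compared against zero is emitted as
//   testl reg, reg
// rather than cmpl with an immediate; a non-zero constant becomes
//   cmpl $value, reg
// and long, float and double operands use cmpq, ucomiss and ucomisd, with FP
// constants taken from the constant area via LiteralFloat/DoubleAddress.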
1364
1365template<class LabelType>
1366void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1367 LabelType* true_target_in,
1368 LabelType* false_target_in) {
1369 // Generated branching requires both targets to be explicit. If either of the
1370 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1371  // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1372 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1373 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1374
1375 // Generate the comparison to set the CC.
1376 GenerateCompareTest(condition);
1377
1378 // Now generate the correct jump(s).
1379 Primitive::Type type = condition->InputAt(0)->GetType();
1380 switch (type) {
1381 case Primitive::kPrimLong: {
1382 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1383 break;
1384 }
1385 case Primitive::kPrimFloat: {
1386 GenerateFPJumps(condition, true_target, false_target);
1387 break;
1388 }
1389 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001390 GenerateFPJumps(condition, true_target, false_target);
1391 break;
1392 }
1393 default:
1394 LOG(FATAL) << "Unexpected condition type " << type;
1395 }
1396
David Brazdil0debae72015-11-12 18:37:00 +00001397 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001398 __ jmp(false_target);
1399 }
David Brazdil0debae72015-11-12 18:37:00 +00001400
1401 if (fallthrough_target.IsLinked()) {
1402 __ Bind(&fallthrough_target);
1403 }
Mark Mendellc4701932015-04-10 13:18:51 -04001404}
1405
David Brazdil0debae72015-11-12 18:37:00 +00001406static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1407  // Moves may clobber the eflags register (moving zero uses xorl), so the flags
1408  // set by `cond` are only usable when `cond` immediately precedes `branch`. FP
1409  // conditions are excluded: materializing them uses more complex branching.
1410 return cond->IsCondition() &&
1411 cond->GetNext() == branch &&
1412 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1413}
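// Example: in the HIR sequence
//   cond: LessThan(x, y)   (materialized)
//   if:   If(cond)
// the cmpl/setcc emitted for `cond` immediately precedes the If, and setcc
// does not touch the flags, so GenerateTestAndBranch can branch on the flags
// of that cmpl directly instead of re-testing the materialized boolean.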
1414
Mark Mendell152408f2015-12-31 12:28:50 -05001415template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001416void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001417 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001418 LabelType* true_target,
1419 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001420 HInstruction* cond = instruction->InputAt(condition_input_index);
1421
1422 if (true_target == nullptr && false_target == nullptr) {
1423 // Nothing to do. The code always falls through.
1424 return;
1425 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001426 // Constant condition, statically compared against "true" (integer value 1).
1427 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001428 if (true_target != nullptr) {
1429 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001430 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001431 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001432 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001433 if (false_target != nullptr) {
1434 __ jmp(false_target);
1435 }
1436 }
1437 return;
1438 }
1439
1440 // The following code generates these patterns:
1441 // (1) true_target == nullptr && false_target != nullptr
1442 // - opposite condition true => branch to false_target
1443 // (2) true_target != nullptr && false_target == nullptr
1444 // - condition true => branch to true_target
1445 // (3) true_target != nullptr && false_target != nullptr
1446 // - condition true => branch to true_target
1447 // - branch to false_target
1448 if (IsBooleanValueOrMaterializedCondition(cond)) {
1449 if (AreEflagsSetFrom(cond, instruction)) {
1450 if (true_target == nullptr) {
1451 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1452 } else {
1453 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1454 }
1455 } else {
1456 // Materialized condition, compare against 0.
1457 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1458 if (lhs.IsRegister()) {
1459 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1460 } else {
1461 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1462 }
1463 if (true_target == nullptr) {
1464 __ j(kEqual, false_target);
1465 } else {
1466 __ j(kNotEqual, true_target);
1467 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001468 }
1469 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001470 // Condition has not been materialized, use its inputs as the
1471 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001472 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001473
David Brazdil0debae72015-11-12 18:37:00 +00001474 // If this is a long or FP comparison that has been folded into
1475 // the HCondition, generate the comparison directly.
1476 Primitive::Type type = condition->InputAt(0)->GetType();
1477 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1478 GenerateCompareTestAndBranch(condition, true_target, false_target);
1479 return;
1480 }
1481
1482 Location lhs = condition->GetLocations()->InAt(0);
1483 Location rhs = condition->GetLocations()->InAt(1);
1484 if (rhs.IsRegister()) {
1485 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1486 } else if (rhs.IsConstant()) {
1487 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001488 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001489 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001490 __ cmpl(lhs.AsRegister<CpuRegister>(),
1491 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1492 }
1493 if (true_target == nullptr) {
1494 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1495 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001496 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001497 }
Dave Allison20dfc792014-06-16 20:44:29 -07001498 }
David Brazdil0debae72015-11-12 18:37:00 +00001499
1500 // If neither branch falls through (case 3), the conditional branch to `true_target`
1501 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1502 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001503 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001504 }
1505}
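// Example of pattern (1) above: for `if (x < y)` whose true successor is the
// next block, only the false edge needs an explicit branch, so the emitted
// code is a compare followed by a single jump on the opposite condition:
//   cmp  x, y          // Intel operand order
//   jge  false_target  // jump on the opposite of "less than"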
1506
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001507void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001508 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1509 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001510 locations->SetInAt(0, Location::Any());
1511 }
1512}
1513
1514void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001515 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1516 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1517 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1518 nullptr : codegen_->GetLabelOf(true_successor);
1519 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1520 nullptr : codegen_->GetLabelOf(false_successor);
1521 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001522}
1523
1524void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1525 LocationSummary* locations = new (GetGraph()->GetArena())
1526 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001527 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001528 locations->SetInAt(0, Location::Any());
1529 }
1530}
1531
1532void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001533 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001534 GenerateTestAndBranch<Label>(deoptimize,
1535 /* condition_input_index */ 0,
1536 slow_path->GetEntryLabel(),
1537 /* false_target */ nullptr);
1538}
1539
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001540static bool SelectCanUseCMOV(HSelect* select) {
1541 // There are no conditional move instructions for XMMs.
1542 if (Primitive::IsFloatingPointType(select->GetType())) {
1543 return false;
1544 }
1545
1546 // A FP condition doesn't generate the single CC that we need.
1547 HInstruction* condition = select->GetCondition();
1548 if (condition->IsCondition() &&
1549 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1550 return false;
1551 }
1552
1553 // We can generate a CMOV for this Select.
1554 return true;
1555}
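// Example: a Select whose condition is an integer comparison and whose values
// are integers can be lowered to a single cmov, whereas a Select on a float
// comparison cannot: the FP compare needs the extra unordered (NaN) jump and
// does not leave one condition code to feed cmov, so VisitSelect below falls
// back to the test-and-branch sequence for it.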
1556
David Brazdil74eb1b22015-12-14 11:44:01 +00001557void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1558 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1559 if (Primitive::IsFloatingPointType(select->GetType())) {
1560 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001561 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001562 } else {
1563 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001564 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001565 if (select->InputAt(1)->IsConstant()) {
1566 locations->SetInAt(1, Location::RequiresRegister());
1567 } else {
1568 locations->SetInAt(1, Location::Any());
1569 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001570 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001571 locations->SetInAt(1, Location::Any());
1572 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001573 }
1574 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1575 locations->SetInAt(2, Location::RequiresRegister());
1576 }
1577 locations->SetOut(Location::SameAsFirstInput());
1578}
1579
1580void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1581 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001582 if (SelectCanUseCMOV(select)) {
1583 // If both the condition and the source types are integer, we can generate
1584 // a CMOV to implement Select.
1585 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001586 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001587 DCHECK(locations->InAt(0).Equals(locations->Out()));
1588
1589 HInstruction* select_condition = select->GetCondition();
1590 Condition cond = kNotEqual;
1591
1592 // Figure out how to test the 'condition'.
1593 if (select_condition->IsCondition()) {
1594 HCondition* condition = select_condition->AsCondition();
1595 if (!condition->IsEmittedAtUseSite()) {
1596 // This was a previously materialized condition.
1597 // Can we use the existing condition code?
1598 if (AreEflagsSetFrom(condition, select)) {
1599 // Materialization was the previous instruction. Condition codes are right.
1600 cond = X86_64IntegerCondition(condition->GetCondition());
1601 } else {
1602 // No, we have to recreate the condition code.
1603 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1604 __ testl(cond_reg, cond_reg);
1605 }
1606 } else {
1607 GenerateCompareTest(condition);
1608 cond = X86_64IntegerCondition(condition->GetCondition());
1609 }
1610 } else {
1611 // Must be a boolean condition, which needs to be compared to 0.
1612 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1613 __ testl(cond_reg, cond_reg);
1614 }
1615
1616 // If the condition is true, overwrite the output, which already contains false.
1617 // Generate the correct sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001618 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1619 if (value_true_loc.IsRegister()) {
1620 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1621 } else {
1622 __ cmov(cond,
1623 value_false,
1624 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1625 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001626 } else {
1627 NearLabel false_target;
1628 GenerateTestAndBranch<NearLabel>(select,
1629 /* condition_input_index */ 2,
1630 /* true_target */ nullptr,
1631 &false_target);
1632 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1633 __ Bind(&false_target);
1634 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001635}
1636
David Srbecky0cf44932015-12-09 14:09:59 +00001637void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1638 new (GetGraph()->GetArena()) LocationSummary(info);
1639}
1640
David Srbeckyd28f4a02016-03-14 17:14:24 +00001641void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1642 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001643}
1644
1645void CodeGeneratorX86_64::GenerateNop() {
1646 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001647}
1648
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001649void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001650 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001651 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001652 // Handle the long/FP comparisons made in instruction simplification.
1653 switch (cond->InputAt(0)->GetType()) {
1654 case Primitive::kPrimLong:
1655 locations->SetInAt(0, Location::RequiresRegister());
1656 locations->SetInAt(1, Location::Any());
1657 break;
1658 case Primitive::kPrimFloat:
1659 case Primitive::kPrimDouble:
1660 locations->SetInAt(0, Location::RequiresFpuRegister());
1661 locations->SetInAt(1, Location::Any());
1662 break;
1663 default:
1664 locations->SetInAt(0, Location::RequiresRegister());
1665 locations->SetInAt(1, Location::Any());
1666 break;
1667 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001668 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001669 locations->SetOut(Location::RequiresRegister());
1670 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001671}
1672
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001673void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001674 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001675 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001676 }
Mark Mendellc4701932015-04-10 13:18:51 -04001677
1678 LocationSummary* locations = cond->GetLocations();
1679 Location lhs = locations->InAt(0);
1680 Location rhs = locations->InAt(1);
1681 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001682 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001683
1684 switch (cond->InputAt(0)->GetType()) {
1685 default:
1686 // Integer case.
1687
1688 // Clear output register: setcc only sets the low byte.
1689 __ xorl(reg, reg);
1690
1691 if (rhs.IsRegister()) {
1692 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1693 } else if (rhs.IsConstant()) {
1694 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001695 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001696 } else {
1697 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1698 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001699 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001700 return;
1701 case Primitive::kPrimLong:
1702 // Clear output register: setcc only sets the low byte.
1703 __ xorl(reg, reg);
1704
1705 if (rhs.IsRegister()) {
1706 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1707 } else if (rhs.IsConstant()) {
1708 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001709 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001710 } else {
1711 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1712 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001713 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001714 return;
1715 case Primitive::kPrimFloat: {
1716 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1717 if (rhs.IsConstant()) {
1718 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1719 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1720 } else if (rhs.IsStackSlot()) {
1721 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1722 } else {
1723 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1724 }
1725 GenerateFPJumps(cond, &true_label, &false_label);
1726 break;
1727 }
1728 case Primitive::kPrimDouble: {
1729 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1730 if (rhs.IsConstant()) {
1731 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1732 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1733 } else if (rhs.IsDoubleStackSlot()) {
1734 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1735 } else {
1736 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1737 }
1738 GenerateFPJumps(cond, &true_label, &false_label);
1739 break;
1740 }
1741 }
1742
1743 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001744 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001745
Roland Levillain4fa13f62015-07-06 18:11:54 +01001746 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001747 __ Bind(&false_label);
1748 __ xorl(reg, reg);
1749 __ jmp(&done_label);
1750
Roland Levillain4fa13f62015-07-06 18:11:54 +01001751 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001752 __ Bind(&true_label);
1753 __ movl(reg, Immediate(1));
1754 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001755}
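// Illustrative output for a materialized integer `x < y` (sketch):
//   xorl  out, out     // clear first: setcc writes only the low byte
//   cmpl  x, y         // Intel operand order
//   setl  out
// FP conditions instead branch through true_label/false_label and load the
// 0/1 result explicitly, as in the code above.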
1756
1757void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001758 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001759}
1760
1761void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001762 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001763}
1764
1765void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001766 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001767}
1768
1769void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001770 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001771}
1772
1773void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001774 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001775}
1776
1777void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001778 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001779}
1780
1781void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001782 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001783}
1784
1785void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001786 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001787}
1788
1789void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001790 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001791}
1792
1793void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001794 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001795}
1796
1797void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001798 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001799}
1800
1801void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001802 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001803}
1804
Aart Bike9f37602015-10-09 11:15:55 -07001805void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001806 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001807}
1808
1809void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001810 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001811}
1812
1813void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001814 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001815}
1816
1817void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001818 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001819}
1820
1821void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001822 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001823}
1824
1825void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001826 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001827}
1828
1829void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001830 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001831}
1832
1833void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001834 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001835}
1836
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001837void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001838 LocationSummary* locations =
1839 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001840 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001841 case Primitive::kPrimBoolean:
1842 case Primitive::kPrimByte:
1843 case Primitive::kPrimShort:
1844 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001845 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001846 case Primitive::kPrimLong: {
1847 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001848 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001849 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1850 break;
1851 }
1852 case Primitive::kPrimFloat:
1853 case Primitive::kPrimDouble: {
1854 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001855 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001856 locations->SetOut(Location::RequiresRegister());
1857 break;
1858 }
1859 default:
1860 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1861 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001862}
1863
1864void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001865 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001866 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001867 Location left = locations->InAt(0);
1868 Location right = locations->InAt(1);
1869
Mark Mendell0c9497d2015-08-21 09:30:05 -04001870 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001871 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001872 Condition less_cond = kLess;
1873
Calin Juravleddb7df22014-11-25 20:56:51 +00001874 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001875 case Primitive::kPrimBoolean:
1876 case Primitive::kPrimByte:
1877 case Primitive::kPrimShort:
1878 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001879 case Primitive::kPrimInt: {
1880 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1881 if (right.IsConstant()) {
1882 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1883 codegen_->Compare32BitValue(left_reg, value);
1884 } else if (right.IsStackSlot()) {
1885 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1886 } else {
1887 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1888 }
1889 break;
1890 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001891 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001892 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1893 if (right.IsConstant()) {
1894 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001895 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001896 } else if (right.IsDoubleStackSlot()) {
1897 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001898 } else {
1899 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1900 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001901 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001902 }
1903 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001904 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1905 if (right.IsConstant()) {
1906 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1907 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1908 } else if (right.IsStackSlot()) {
1909 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1910 } else {
1911 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1912 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001913 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001914 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001915 break;
1916 }
1917 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001918 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1919 if (right.IsConstant()) {
1920 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1921 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1922 } else if (right.IsDoubleStackSlot()) {
1923 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1924 } else {
1925 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1926 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001927 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001928 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001929 break;
1930 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001931 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001932 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001933 }
Aart Bika19616e2016-02-01 18:57:58 -08001934
Calin Juravleddb7df22014-11-25 20:56:51 +00001935 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001936 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001937 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001938
Calin Juravle91debbc2014-11-26 19:01:09 +00001939 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001940 __ movl(out, Immediate(1));
1941 __ jmp(&done);
1942
1943 __ Bind(&less);
1944 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001945
1946 __ Bind(&done);
1947}
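// Sketch of the sequence emitted above for a long compare (the int and FP
// cases differ only in the compare instruction and in `less_cond`):
//   cmpq  lhs, rhs     // Intel operand order
//   movl  $0, out      // mov does not alter the flags
//   je    done
//   jl    less
//   movl  $1, out      // greater
//   jmp   done
// less:
//   movl  $-1, out
// done:
// For float/double, an unordered (NaN) result is routed to `greater` or
// `less` according to the gt/lt bias, and kBelow replaces kLess because
// ucomiss/ucomisd report "less than" through the carry flag.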
1948
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001949void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001950 LocationSummary* locations =
1951 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001952 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001953}
1954
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001955void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001956 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001957}
1958
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001959void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1960 LocationSummary* locations =
1961 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1962 locations->SetOut(Location::ConstantLocation(constant));
1963}
1964
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001965void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001966 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001967}
1968
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001969void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001970 LocationSummary* locations =
1971 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001972 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001973}
1974
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001975void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001976 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001977}
1978
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001979void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
1980 LocationSummary* locations =
1981 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1982 locations->SetOut(Location::ConstantLocation(constant));
1983}
1984
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001985void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001986 // Will be generated at use site.
1987}
1988
1989void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
1990 LocationSummary* locations =
1991 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1992 locations->SetOut(Location::ConstantLocation(constant));
1993}
1994
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001995void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
1996 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001997 // Will be generated at use site.
1998}
1999
Calin Juravle27df7582015-04-17 19:12:31 +01002000void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2001 memory_barrier->SetLocations(nullptr);
2002}
2003
2004void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002005 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002006}
2007
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002008void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2009 ret->SetLocations(nullptr);
2010}
2011
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002012void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002013 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002014}
2015
2016void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002017 LocationSummary* locations =
2018 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002019 switch (ret->InputAt(0)->GetType()) {
2020 case Primitive::kPrimBoolean:
2021 case Primitive::kPrimByte:
2022 case Primitive::kPrimChar:
2023 case Primitive::kPrimShort:
2024 case Primitive::kPrimInt:
2025 case Primitive::kPrimNot:
2026 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002027 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002028 break;
2029
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002030 case Primitive::kPrimFloat:
2031 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002032 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002033 break;
2034
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002035 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002036 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002037 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002038}
2039
2040void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2041 if (kIsDebugBuild) {
2042 switch (ret->InputAt(0)->GetType()) {
2043 case Primitive::kPrimBoolean:
2044 case Primitive::kPrimByte:
2045 case Primitive::kPrimChar:
2046 case Primitive::kPrimShort:
2047 case Primitive::kPrimInt:
2048 case Primitive::kPrimNot:
2049 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002050 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002051 break;
2052
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002053 case Primitive::kPrimFloat:
2054 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002055 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002056 XMM0);
2057 break;
2058
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002059 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002060 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002061 }
2062 }
2063 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002064}
2065
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002066Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2067 switch (type) {
2068 case Primitive::kPrimBoolean:
2069 case Primitive::kPrimByte:
2070 case Primitive::kPrimChar:
2071 case Primitive::kPrimShort:
2072 case Primitive::kPrimInt:
2073 case Primitive::kPrimNot:
2074 case Primitive::kPrimLong:
2075 return Location::RegisterLocation(RAX);
2076
2077 case Primitive::kPrimVoid:
2078 return Location::NoLocation();
2079
2080 case Primitive::kPrimDouble:
2081 case Primitive::kPrimFloat:
2082 return Location::FpuRegisterLocation(XMM0);
2083 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002084
2085 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002086}
2087
2088Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2089 return Location::RegisterLocation(kMethodRegisterArgument);
2090}
2091
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002092Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002093 switch (type) {
2094 case Primitive::kPrimBoolean:
2095 case Primitive::kPrimByte:
2096 case Primitive::kPrimChar:
2097 case Primitive::kPrimShort:
2098 case Primitive::kPrimInt:
2099 case Primitive::kPrimNot: {
2100 uint32_t index = gp_index_++;
2101 stack_index_++;
2102 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002103 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002104 } else {
2105 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2106 }
2107 }
2108
2109 case Primitive::kPrimLong: {
2110 uint32_t index = gp_index_;
2111 stack_index_ += 2;
2112 if (index < calling_convention.GetNumberOfRegisters()) {
2113 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002114 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002115 } else {
2116 gp_index_ += 2;
2117 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2118 }
2119 }
2120
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002121 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002122 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002123 stack_index_++;
2124 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002125 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002126 } else {
2127 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2128 }
2129 }
2130
2131 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002132 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002133 stack_index_ += 2;
2134 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002135 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002136 } else {
2137 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2138 }
2139 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002140
2141 case Primitive::kPrimVoid:
2142 LOG(FATAL) << "Unexpected parameter type " << type;
2143 break;
2144 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002145 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002146}
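// A minimal standalone sketch (not part of the code generator) of how the
// visitor above hands out argument locations: integral/reference arguments
// consume core registers, FP arguments consume XMM registers independently,
// and the stack index advances for every argument so that spilled arguments
// land on the correct stack slot. The register counts below are assumptions
// for illustration only; the authoritative values come from
// InvokeDexCallingConvention.
#include <cstdint>
#include <cstdio>

namespace arg_assignment_sketch {

constexpr uint32_t kAssumedCoreArgRegisters = 5u;  // assumed, for illustration
constexpr uint32_t kAssumedFpArgRegisters = 8u;    // assumed, for illustration

struct Assigner {
  uint32_t gp_index = 0u;
  uint32_t float_index = 0u;
  uint32_t stack_index = 0u;  // counted in 32-bit slots, like stack_index_

  // kind: 'i' = int/reference, 'j' = long, 'f' = float, 'd' = double.
  void Next(char kind) {
    const bool is_fp = (kind == 'f' || kind == 'd');
    const bool is_wide = (kind == 'j' || kind == 'd');
    uint32_t& index = is_fp ? float_index : gp_index;
    const uint32_t register_limit =
        is_fp ? kAssumedFpArgRegisters : kAssumedCoreArgRegisters;
    const uint32_t slot = stack_index;  // slot used if this argument spills
    stack_index += is_wide ? 2u : 1u;   // stack space is reserved either way
    if (index < register_limit) {
      std::printf("%c -> %s argument register #%u\n",
                  kind, is_fp ? "FP" : "core", static_cast<unsigned>(index));
    } else {
      std::printf("%c -> stack slot %u\n", kind, static_cast<unsigned>(slot));
    }
    ++index;
  }
};

}  // namespace arg_assignment_sketch

// Usage sketch for a (long, float, int, double, Object) signature:
//   arg_assignment_sketch::Assigner assigner;
//   for (char kind : {'j', 'f', 'i', 'd', 'i'}) assigner.Next(kind);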
2147
Calin Juravle175dc732015-08-25 15:42:32 +01002148void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2149 // The trampoline uses the same calling convention as a regular dex call,
2150 // except that instead of holding the target Method*, arg0/r0 contains
2151 // the method_idx.
2152 HandleInvoke(invoke);
2153}
2154
2155void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2156 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2157}
2158
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002159void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002160 // Explicit clinit checks triggered by static invokes must have been pruned by
2161 // art::PrepareForRegisterAllocation.
2162 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002163
Mark Mendellfb8d2792015-03-31 22:16:59 -04002164 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002165 if (intrinsic.TryDispatch(invoke)) {
2166 return;
2167 }
2168
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002169 HandleInvoke(invoke);
2170}
2171
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002172static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2173 if (invoke->GetLocations()->Intrinsified()) {
2174 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2175 intrinsic.Dispatch(invoke);
2176 return true;
2177 }
2178 return false;
2179}
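// The invoke visitors above use a two-phase intrinsic path: TryDispatch runs
// while building locations and marks the LocationSummary as intrinsified, and
// TryGenerateIntrinsicCode consults that mark during code generation, letting
// the caller fall back to the generic invoke otherwise. A hedged sketch of the
// same control flow follows; the types and functions are hypothetical
// stand-ins, not the real ART interfaces.
namespace intrinsic_dispatch_sketch {

struct Summary { bool intrinsified = false; };
struct Invoke { Summary locations; };

// Phase 1 (locations building): a recognizer may claim the invoke.
inline bool TryMarkIntrinsified(Invoke* invoke, bool recognized) {
  if (recognized) {
    invoke->locations.intrinsified = true;
  }
  return recognized;
}

// Phase 2 (code generation): honor the earlier decision, or report false so
// the caller emits the generic invoke sequence instead.
inline bool TryEmitIntrinsic(Invoke* invoke) {
  if (invoke->locations.intrinsified) {
    // ... specialized code would be emitted here ...
    return true;
  }
  return false;
}

}  // namespace intrinsic_dispatch_sketch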
2180
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002181void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002182 // Explicit clinit checks triggered by static invokes must have been pruned by
2183 // art::PrepareForRegisterAllocation.
2184 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002185
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002186 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2187 return;
2188 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002189
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002190 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002191 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002192 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002193 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002194}
2195
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002196void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002197 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002198 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002199}
2200
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002201void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002202 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002203 if (intrinsic.TryDispatch(invoke)) {
2204 return;
2205 }
2206
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002207 HandleInvoke(invoke);
2208}
2209
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002210void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002211 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2212 return;
2213 }
2214
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002215 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002216 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002217 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002218}
2219
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002220void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2221 HandleInvoke(invoke);
2222 // Add the hidden argument.
2223 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2224}
2225
2226void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2227 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002228 LocationSummary* locations = invoke->GetLocations();
2229 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2230 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Mathieu Chartiere401d142015-04-22 13:56:20 -07002231 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
2232 invoke->GetImtIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002233 Location receiver = locations->InAt(0);
2234 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2235
Roland Levillain0d5a2812015-11-13 10:07:31 +00002236 // Set the hidden argument. It is safe to do this here, as RAX
2237 // won't be modified again before the `call` instruction below.
2238 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002239 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002240
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002241 if (receiver.IsStackSlot()) {
2242 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002243 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002244 __ movl(temp, Address(temp, class_offset));
2245 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002246 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002247 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002248 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002249 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002250 // Instead of simply (possibly) unpoisoning `temp` here, we should
2251 // emit a read barrier for the previous class reference load.
2252 // However, this is not required in practice, as this is an
2253 // intermediate/temporary reference, and because the current
2254 // concurrent copying collector keeps the from-space memory
2255 // intact/accessible until the end of the marking phase (though
2256 // future collectors may not).
Roland Levillain4d027112015-07-01 15:41:14 +01002257 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002258 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002259 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002260 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002261 __ call(Address(temp,
2262 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002263
2264 DCHECK(!codegen_->IsLeafMethod());
2265 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2266}
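// Hedged, simplified model (standalone; the struct layouts below are
// hypothetical stand-ins, not the real runtime types) of the interface
// dispatch emitted above: read the receiver's class, index the class's
// embedded interface method table (IMT) with the method's slot, then call the
// entry point of the ArtMethod found there. The hidden argument in RAX carries
// the dex method index so a conflict stub could resolve IMT collisions; that
// part is omitted here.
#include <cstdint>

namespace imt_dispatch_sketch {

constexpr uint32_t kAssumedImtSize = 64u;  // stand-in for mirror::Class::kImtSize

struct FakeArtMethod {
  void (*entry_point_from_quick_compiled_code)();
};

struct FakeClass {
  FakeArtMethod* embedded_imt[kAssumedImtSize];
};

struct FakeObject {
  FakeClass* klass;  // every object starts with its class pointer
};

inline void DispatchInterfaceCall(FakeObject* receiver, uint32_t dex_method_index) {
  // temp = receiver->klass_
  FakeClass* klass = receiver->klass;
  // temp = temp->GetImtEntryAt(dex_method_index % kImtSize)
  FakeArtMethod* method = klass->embedded_imt[dex_method_index % kAssumedImtSize];
  // call temp->GetEntryPoint()
  method->entry_point_from_quick_compiled_code();
}

}  // namespace imt_dispatch_sketch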
2267
Roland Levillain88cb1752014-10-20 16:36:47 +01002268void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2269 LocationSummary* locations =
2270 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2271 switch (neg->GetResultType()) {
2272 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002273 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002274 locations->SetInAt(0, Location::RequiresRegister());
2275 locations->SetOut(Location::SameAsFirstInput());
2276 break;
2277
Roland Levillain88cb1752014-10-20 16:36:47 +01002278 case Primitive::kPrimFloat:
2279 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002280 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002281 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002282 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002283 break;
2284
2285 default:
2286 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2287 }
2288}
2289
2290void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2291 LocationSummary* locations = neg->GetLocations();
2292 Location out = locations->Out();
2293 Location in = locations->InAt(0);
2294 switch (neg->GetResultType()) {
2295 case Primitive::kPrimInt:
2296 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002297 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002298 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002299 break;
2300
2301 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002302 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002303 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002304 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002305 break;
2306
Roland Levillain5368c212014-11-27 15:03:41 +00002307 case Primitive::kPrimFloat: {
2308 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002309 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002310 // Implement float negation with an exclusive or with value
2311 // 0x80000000 (mask for bit 31, representing the sign of a
2312 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002313 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002314 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002315 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002316 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002317
Roland Levillain5368c212014-11-27 15:03:41 +00002318 case Primitive::kPrimDouble: {
2319 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002320 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002321 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002322 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002323 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002324 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002325 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002326 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002327 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002328
2329 default:
2330 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2331 }
2332}
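// Standalone illustration (not part of the code generator) of the sign-bit
// trick used for the float/double cases above: because IEEE-754 stores the
// sign in the most significant bit, XOR-ing with a mask that has only that
// bit set negates the value, and unlike computing `0 - x` it yields the right
// sign for +0.0. std::memcpy is used for well-defined type punning.
#include <cstdint>
#include <cstring>

namespace fp_negation_sketch {

inline float NegateFloatViaXor(float value) {
  uint32_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  bits ^= UINT32_C(0x80000000);  // flip bit 31 (the sign bit), like xorps
  std::memcpy(&value, &bits, sizeof(bits));
  return value;
}

inline double NegateDoubleViaXor(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  bits ^= UINT64_C(0x8000000000000000);  // flip bit 63, like xorpd
  std::memcpy(&value, &bits, sizeof(bits));
  return value;
}

}  // namespace fp_negation_sketch

// Usage sketch:
//   fp_negation_sketch::NegateFloatViaXor(1.5f)  == -1.5f
//   fp_negation_sketch::NegateDoubleViaXor(-2.0) == 2.0
//   fp_negation_sketch::NegateFloatViaXor(0.0f)  yields -0.0f (sign bit set)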
2333
Roland Levillaindff1f282014-11-05 14:15:05 +00002334void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2335 LocationSummary* locations =
2336 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2337 Primitive::Type result_type = conversion->GetResultType();
2338 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002339 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002340
David Brazdilb2bd1c52015-03-25 11:17:37 +00002341 // The Java language does not allow treating boolean as an integral type but
2342 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002343
Roland Levillaindff1f282014-11-05 14:15:05 +00002344 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002345 case Primitive::kPrimByte:
2346 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002347 case Primitive::kPrimLong:
2348 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002349 case Primitive::kPrimBoolean:
2350 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002351 case Primitive::kPrimShort:
2352 case Primitive::kPrimInt:
2353 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002354 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002355 locations->SetInAt(0, Location::Any());
2356 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2357 break;
2358
2359 default:
2360 LOG(FATAL) << "Unexpected type conversion from " << input_type
2361 << " to " << result_type;
2362 }
2363 break;
2364
Roland Levillain01a8d712014-11-14 16:27:39 +00002365 case Primitive::kPrimShort:
2366 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002367 case Primitive::kPrimLong:
2368 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002369 case Primitive::kPrimBoolean:
2370 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002371 case Primitive::kPrimByte:
2372 case Primitive::kPrimInt:
2373 case Primitive::kPrimChar:
2374 // Processing a Dex `int-to-short' instruction.
2375 locations->SetInAt(0, Location::Any());
2376 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2377 break;
2378
2379 default:
2380 LOG(FATAL) << "Unexpected type conversion from " << input_type
2381 << " to " << result_type;
2382 }
2383 break;
2384
Roland Levillain946e1432014-11-11 17:35:19 +00002385 case Primitive::kPrimInt:
2386 switch (input_type) {
2387 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002388 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002389 locations->SetInAt(0, Location::Any());
2390 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2391 break;
2392
2393 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002394 // Processing a Dex `float-to-int' instruction.
2395 locations->SetInAt(0, Location::RequiresFpuRegister());
2396 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002397 break;
2398
Roland Levillain946e1432014-11-11 17:35:19 +00002399 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002400 // Processing a Dex `double-to-int' instruction.
2401 locations->SetInAt(0, Location::RequiresFpuRegister());
2402 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002403 break;
2404
2405 default:
2406 LOG(FATAL) << "Unexpected type conversion from " << input_type
2407 << " to " << result_type;
2408 }
2409 break;
2410
Roland Levillaindff1f282014-11-05 14:15:05 +00002411 case Primitive::kPrimLong:
2412 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002413 case Primitive::kPrimBoolean:
2414 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002415 case Primitive::kPrimByte:
2416 case Primitive::kPrimShort:
2417 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002418 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002419 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002420 // TODO: We would benefit from a (to-be-implemented)
2421 // Location::RegisterOrStackSlot requirement for this input.
2422 locations->SetInAt(0, Location::RequiresRegister());
2423 locations->SetOut(Location::RequiresRegister());
2424 break;
2425
2426 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002427 // Processing a Dex `float-to-long' instruction.
2428 locations->SetInAt(0, Location::RequiresFpuRegister());
2429 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002430 break;
2431
Roland Levillaindff1f282014-11-05 14:15:05 +00002432 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002433 // Processing a Dex `double-to-long' instruction.
2434 locations->SetInAt(0, Location::RequiresFpuRegister());
2435 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002436 break;
2437
2438 default:
2439 LOG(FATAL) << "Unexpected type conversion from " << input_type
2440 << " to " << result_type;
2441 }
2442 break;
2443
Roland Levillain981e4542014-11-14 11:47:14 +00002444 case Primitive::kPrimChar:
2445 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002446 case Primitive::kPrimLong:
2447 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002448 case Primitive::kPrimBoolean:
2449 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002450 case Primitive::kPrimByte:
2451 case Primitive::kPrimShort:
2452 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002453 // Processing a Dex `int-to-char' instruction.
2454 locations->SetInAt(0, Location::Any());
2455 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2456 break;
2457
2458 default:
2459 LOG(FATAL) << "Unexpected type conversion from " << input_type
2460 << " to " << result_type;
2461 }
2462 break;
2463
Roland Levillaindff1f282014-11-05 14:15:05 +00002464 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002465 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002466 case Primitive::kPrimBoolean:
2467 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002468 case Primitive::kPrimByte:
2469 case Primitive::kPrimShort:
2470 case Primitive::kPrimInt:
2471 case Primitive::kPrimChar:
2472 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002473 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002474 locations->SetOut(Location::RequiresFpuRegister());
2475 break;
2476
2477 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002478 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002479 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002480 locations->SetOut(Location::RequiresFpuRegister());
2481 break;
2482
Roland Levillaincff13742014-11-17 14:32:17 +00002483 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002484 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002485 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002486 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002487 break;
2488
2489 default:
2490 LOG(FATAL) << "Unexpected type conversion from " << input_type
2491 << " to " << result_type;
2492 }
2493 break;
2494
Roland Levillaindff1f282014-11-05 14:15:05 +00002495 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002496 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002497 case Primitive::kPrimBoolean:
2498 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002499 case Primitive::kPrimByte:
2500 case Primitive::kPrimShort:
2501 case Primitive::kPrimInt:
2502 case Primitive::kPrimChar:
2503 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002504 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002505 locations->SetOut(Location::RequiresFpuRegister());
2506 break;
2507
2508 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002509 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002510 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002511 locations->SetOut(Location::RequiresFpuRegister());
2512 break;
2513
Roland Levillaincff13742014-11-17 14:32:17 +00002514 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002515 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002516 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002517 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002518 break;
2519
2520 default:
2521 LOG(FATAL) << "Unexpected type conversion from " << input_type
2522 << " to " << result_type;
2523 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002524 break;
2525
2526 default:
2527 LOG(FATAL) << "Unexpected type conversion from " << input_type
2528 << " to " << result_type;
2529 }
2530}
2531
2532void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2533 LocationSummary* locations = conversion->GetLocations();
2534 Location out = locations->Out();
2535 Location in = locations->InAt(0);
2536 Primitive::Type result_type = conversion->GetResultType();
2537 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002538 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002539 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002540 case Primitive::kPrimByte:
2541 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002542 case Primitive::kPrimLong:
2543 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002544 case Primitive::kPrimBoolean:
2545 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002546 case Primitive::kPrimShort:
2547 case Primitive::kPrimInt:
2548 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002549 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002550 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002551 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002552 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002553 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002554 Address(CpuRegister(RSP), in.GetStackIndex()));
2555 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002556 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002557 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002558 }
2559 break;
2560
2561 default:
2562 LOG(FATAL) << "Unexpected type conversion from " << input_type
2563 << " to " << result_type;
2564 }
2565 break;
2566
Roland Levillain01a8d712014-11-14 16:27:39 +00002567 case Primitive::kPrimShort:
2568 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002569 case Primitive::kPrimLong:
2570 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002571 case Primitive::kPrimBoolean:
2572 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002573 case Primitive::kPrimByte:
2574 case Primitive::kPrimInt:
2575 case Primitive::kPrimChar:
2576 // Processing a Dex `int-to-short' instruction.
2577 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002578 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002579 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002580 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002581 Address(CpuRegister(RSP), in.GetStackIndex()));
2582 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002583 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002584 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002585 }
2586 break;
2587
2588 default:
2589 LOG(FATAL) << "Unexpected type conversion from " << input_type
2590 << " to " << result_type;
2591 }
2592 break;
2593
Roland Levillain946e1432014-11-11 17:35:19 +00002594 case Primitive::kPrimInt:
2595 switch (input_type) {
2596 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002597 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002598 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002599 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002600 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002601 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002602 Address(CpuRegister(RSP), in.GetStackIndex()));
2603 } else {
2604 DCHECK(in.IsConstant());
2605 DCHECK(in.GetConstant()->IsLongConstant());
2606 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002607 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002608 }
2609 break;
2610
Roland Levillain3f8f9362014-12-02 17:45:01 +00002611 case Primitive::kPrimFloat: {
2612 // Processing a Dex `float-to-int' instruction.
2613 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2614 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002615 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002616
2617 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002618 // if input >= (float)INT_MAX goto done
2619 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002620 __ j(kAboveEqual, &done);
2621 // if input == NaN goto nan
2622 __ j(kUnordered, &nan);
2623 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002624 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002625 __ jmp(&done);
2626 __ Bind(&nan);
2627 // output = 0
2628 __ xorl(output, output);
2629 __ Bind(&done);
2630 break;
2631 }
2632
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002633 case Primitive::kPrimDouble: {
2634 // Processing a Dex `double-to-int' instruction.
2635 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2636 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002637 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002638
2639 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002640 // if input >= (double)INT_MAX goto done
2641 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002642 __ j(kAboveEqual, &done);
2643 // if input == NaN goto nan
2644 __ j(kUnordered, &nan);
2645 // output = double-to-int-truncate(input)
2646 __ cvttsd2si(output, input);
2647 __ jmp(&done);
2648 __ Bind(&nan);
2649 // output = 0
2650 __ xorl(output, output);
2651 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002652 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002653 }
Roland Levillain946e1432014-11-11 17:35:19 +00002654
2655 default:
2656 LOG(FATAL) << "Unexpected type conversion from " << input_type
2657 << " to " << result_type;
2658 }
2659 break;
2660
Roland Levillaindff1f282014-11-05 14:15:05 +00002661 case Primitive::kPrimLong:
2662 DCHECK(out.IsRegister());
2663 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002664 case Primitive::kPrimBoolean:
2665 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002666 case Primitive::kPrimByte:
2667 case Primitive::kPrimShort:
2668 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002669 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002670 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002671 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002672 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002673 break;
2674
Roland Levillain624279f2014-12-04 11:54:28 +00002675 case Primitive::kPrimFloat: {
2676 // Processing a Dex `float-to-long' instruction.
2677 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2678 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002679 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002680
Mark Mendell92e83bf2015-05-07 11:25:03 -04002681 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002682 // if input >= (float)LONG_MAX goto done
2683 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002684 __ j(kAboveEqual, &done);
2685 // if input == NaN goto nan
2686 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002687 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002688 __ cvttss2si(output, input, true);
2689 __ jmp(&done);
2690 __ Bind(&nan);
2691 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002692 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002693 __ Bind(&done);
2694 break;
2695 }
2696
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002697 case Primitive::kPrimDouble: {
2698 // Processing a Dex `double-to-long' instruction.
2699 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2700 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002701 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002702
Mark Mendell92e83bf2015-05-07 11:25:03 -04002703 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002704 // if input >= (double)LONG_MAX goto done
2705 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002706 __ j(kAboveEqual, &done);
2707 // if input == NaN goto nan
2708 __ j(kUnordered, &nan);
2709 // output = double-to-long-truncate(input)
2710 __ cvttsd2si(output, input, true);
2711 __ jmp(&done);
2712 __ Bind(&nan);
2713 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002714 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002715 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002716 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002717 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002718
2719 default:
2720 LOG(FATAL) << "Unexpected type conversion from " << input_type
2721 << " to " << result_type;
2722 }
2723 break;
2724
Roland Levillain981e4542014-11-14 11:47:14 +00002725 case Primitive::kPrimChar:
2726 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002727 case Primitive::kPrimLong:
2728 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002729 case Primitive::kPrimBoolean:
2730 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002731 case Primitive::kPrimByte:
2732 case Primitive::kPrimShort:
2733 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002734 // Processing a Dex `int-to-char' instruction.
2735 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002736 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002737 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002738 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002739 Address(CpuRegister(RSP), in.GetStackIndex()));
2740 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002741 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002742 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002743 }
2744 break;
2745
2746 default:
2747 LOG(FATAL) << "Unexpected type conversion from " << input_type
2748 << " to " << result_type;
2749 }
2750 break;
2751
Roland Levillaindff1f282014-11-05 14:15:05 +00002752 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002753 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002754 case Primitive::kPrimBoolean:
2755 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002756 case Primitive::kPrimByte:
2757 case Primitive::kPrimShort:
2758 case Primitive::kPrimInt:
2759 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002760 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002761 if (in.IsRegister()) {
2762 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2763 } else if (in.IsConstant()) {
2764 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2765 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002766 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002767 } else {
2768 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2769 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2770 }
Roland Levillaincff13742014-11-17 14:32:17 +00002771 break;
2772
2773 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002774 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002775 if (in.IsRegister()) {
2776 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2777 } else if (in.IsConstant()) {
2778 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2779 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002780 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002781 } else {
2782 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2783 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2784 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002785 break;
2786
Roland Levillaincff13742014-11-17 14:32:17 +00002787 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002788 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002789 if (in.IsFpuRegister()) {
2790 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2791 } else if (in.IsConstant()) {
2792 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2793 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002794 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002795 } else {
2796 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2797 Address(CpuRegister(RSP), in.GetStackIndex()));
2798 }
Roland Levillaincff13742014-11-17 14:32:17 +00002799 break;
2800
2801 default:
2802 LOG(FATAL) << "Unexpected type conversion from " << input_type
2803 << " to " << result_type;
2804 }
2805 break;
2806
Roland Levillaindff1f282014-11-05 14:15:05 +00002807 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002808 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002809 case Primitive::kPrimBoolean:
2810 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002811 case Primitive::kPrimByte:
2812 case Primitive::kPrimShort:
2813 case Primitive::kPrimInt:
2814 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002815 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002816 if (in.IsRegister()) {
2817 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2818 } else if (in.IsConstant()) {
2819 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2820 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002821 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002822 } else {
2823 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2824 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2825 }
Roland Levillaincff13742014-11-17 14:32:17 +00002826 break;
2827
2828 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002829 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002830 if (in.IsRegister()) {
2831 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2832 } else if (in.IsConstant()) {
2833 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2834 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002835 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002836 } else {
2837 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2838 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2839 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002840 break;
2841
Roland Levillaincff13742014-11-17 14:32:17 +00002842 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002843 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002844 if (in.IsFpuRegister()) {
2845 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2846 } else if (in.IsConstant()) {
2847 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2848 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002849 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002850 } else {
2851 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2852 Address(CpuRegister(RSP), in.GetStackIndex()));
2853 }
Roland Levillaincff13742014-11-17 14:32:17 +00002854 break;
2855
2856 default:
2857 LOG(FATAL) << "Unexpected type conversion from " << input_type
2858 << " to " << result_type;
2859 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002860 break;
2861
2862 default:
2863 LOG(FATAL) << "Unexpected type conversion from " << input_type
2864 << " to " << result_type;
2865 }
2866}
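// Hedged standalone sketch of the Java semantics the float->int path above
// implements. cvttss2si/cvttsd2si return the "integer indefinite" value
// (INT32_MIN / INT64_MIN) for NaN and out-of-range inputs, so the generated
// code pre-loads the maximum, branches on `input >= (float)max` for positive
// overflow and on unordered for NaN; negative overflow already truncates to
// the minimum, which is what Java requires.
#include <cstdint>
#include <cmath>
#include <limits>

namespace fp_to_int_sketch {

inline int32_t JavaFloatToInt(float input) {
  constexpr int32_t kMax = std::numeric_limits<int32_t>::max();
  constexpr int32_t kMin = std::numeric_limits<int32_t>::min();
  if (std::isnan(input)) {
    return 0;                               // the `nan` label: output = 0
  }
  if (input >= static_cast<float>(kMax)) {
    return kMax;                            // the `kAboveEqual` branch
  }
  if (input <= static_cast<float>(kMin)) {
    return kMin;                            // hardware truncation yields this anyway
  }
  return static_cast<int32_t>(input);       // plain truncation toward zero
}

}  // namespace fp_to_int_sketch

// For example, JavaFloatToInt(std::nanf("")) == 0, JavaFloatToInt(3e9f) ==
// INT32_MAX, and JavaFloatToInt(-3e9f) == INT32_MIN; the long and double
// variants in the code above follow the same shape with 64-bit limits.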
2867
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002868void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002869 LocationSummary* locations =
2870 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002871 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002872 case Primitive::kPrimInt: {
2873 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002874 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2875 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002876 break;
2877 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002878
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002879 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002880 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002881 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002882 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002883 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002884 break;
2885 }
2886
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002887 case Primitive::kPrimDouble:
2888 case Primitive::kPrimFloat: {
2889 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002890 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002891 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002892 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002893 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002894
2895 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002896 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002897 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002898}
2899
2900void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2901 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002902 Location first = locations->InAt(0);
2903 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002904 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002905
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002906 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002907 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002908 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002909 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2910 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002911 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2912 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002913 } else {
2914 __ leal(out.AsRegister<CpuRegister>(), Address(
2915 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2916 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002917 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002918 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2919 __ addl(out.AsRegister<CpuRegister>(),
2920 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2921 } else {
2922 __ leal(out.AsRegister<CpuRegister>(), Address(
2923 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2924 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002925 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002926 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002927 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002928 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002929 break;
2930 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002931
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002932 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002933 if (second.IsRegister()) {
2934 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2935 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002936 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2937 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002938 } else {
2939 __ leaq(out.AsRegister<CpuRegister>(), Address(
2940 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2941 }
2942 } else {
2943 DCHECK(second.IsConstant());
2944 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2945 int32_t int32_value = Low32Bits(value);
2946 DCHECK_EQ(int32_value, value);
2947 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2948 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2949 } else {
2950 __ leaq(out.AsRegister<CpuRegister>(), Address(
2951 first.AsRegister<CpuRegister>(), int32_value));
2952 }
2953 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002954 break;
2955 }
2956
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002957 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002958 if (second.IsFpuRegister()) {
2959 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2960 } else if (second.IsConstant()) {
2961 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002962 codegen_->LiteralFloatAddress(
2963 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002964 } else {
2965 DCHECK(second.IsStackSlot());
2966 __ addss(first.AsFpuRegister<XmmRegister>(),
2967 Address(CpuRegister(RSP), second.GetStackIndex()));
2968 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002969 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002970 }
2971
2972 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002973 if (second.IsFpuRegister()) {
2974 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2975 } else if (second.IsConstant()) {
2976 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002977 codegen_->LiteralDoubleAddress(
2978 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002979 } else {
2980 DCHECK(second.IsDoubleStackSlot());
2981 __ addsd(first.AsFpuRegister<XmmRegister>(),
2982 Address(CpuRegister(RSP), second.GetStackIndex()));
2983 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002984 break;
2985 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002986
2987 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002988 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002989 }
2990}
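// For the integer cases above, the generator keeps a plain addl/addq when the
// destination aliases one of the inputs and otherwise uses leal/leaq, which
// writes `base + index` (or `base + displacement`) into a third register
// without clobbering either source. A hedged sketch of the same selection
// follows; the Emit* helpers are hypothetical textual stand-ins for the real
// assembler.
#include <cstdio>
#include <cstring>

namespace add_selection_sketch {

inline void EmitAddl(const char* dst, const char* src) {
  std::printf("addl %s, %s\n", dst, src);
}

inline void EmitLeal(const char* dst, const char* base, const char* index) {
  // leal computes `base + index*1 + 0` into dst without modifying base/index.
  std::printf("leal %s, [%s + %s]\n", dst, base, index);
}

inline void SelectIntAdd(const char* out, const char* first, const char* second) {
  if (std::strcmp(out, first) == 0) {
    EmitAddl(out, second);         // out already holds the first operand
  } else if (std::strcmp(out, second) == 0) {
    EmitAddl(out, first);          // addition commutes, so reuse the other input
  } else {
    EmitLeal(out, first, second);  // non-destructive three-operand form
  }
}

}  // namespace add_selection_sketch

// Usage sketch: SelectIntAdd("eax", "ecx", "edx") prints the leal form, since
// the output register matches neither input.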
2991
2992void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002993 LocationSummary* locations =
2994 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002995 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002996 case Primitive::kPrimInt: {
2997 locations->SetInAt(0, Location::RequiresRegister());
2998 locations->SetInAt(1, Location::Any());
2999 locations->SetOut(Location::SameAsFirstInput());
3000 break;
3001 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003002 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003003 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003004 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003005 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003006 break;
3007 }
Calin Juravle11351682014-10-23 15:38:15 +01003008 case Primitive::kPrimFloat:
3009 case Primitive::kPrimDouble: {
3010 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003011 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003012 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003013 break;
Calin Juravle11351682014-10-23 15:38:15 +01003014 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003015 default:
Calin Juravle11351682014-10-23 15:38:15 +01003016 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003017 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003018}
3019
3020void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3021 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003022 Location first = locations->InAt(0);
3023 Location second = locations->InAt(1);
3024 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003025 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003026 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003027 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003028 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003029 } else if (second.IsConstant()) {
3030 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003031 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003032 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003033 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003034 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003035 break;
3036 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003037 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003038 if (second.IsConstant()) {
3039 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3040 DCHECK(IsInt<32>(value));
3041 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3042 } else {
3043 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3044 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003045 break;
3046 }
3047
Calin Juravle11351682014-10-23 15:38:15 +01003048 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003049 if (second.IsFpuRegister()) {
3050 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3051 } else if (second.IsConstant()) {
3052 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003053 codegen_->LiteralFloatAddress(
3054 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003055 } else {
3056 DCHECK(second.IsStackSlot());
3057 __ subss(first.AsFpuRegister<XmmRegister>(),
3058 Address(CpuRegister(RSP), second.GetStackIndex()));
3059 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003060 break;
Calin Juravle11351682014-10-23 15:38:15 +01003061 }
3062
3063 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003064 if (second.IsFpuRegister()) {
3065 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3066 } else if (second.IsConstant()) {
3067 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003068 codegen_->LiteralDoubleAddress(
3069 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003070 } else {
3071 DCHECK(second.IsDoubleStackSlot());
3072 __ subsd(first.AsFpuRegister<XmmRegister>(),
3073 Address(CpuRegister(RSP), second.GetStackIndex()));
3074 }
Calin Juravle11351682014-10-23 15:38:15 +01003075 break;
3076 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003077
3078 default:
Calin Juravle11351682014-10-23 15:38:15 +01003079 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003080 }
3081}
3082
Calin Juravle34bacdf2014-10-07 20:23:36 +01003083void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3084 LocationSummary* locations =
3085 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3086 switch (mul->GetResultType()) {
3087 case Primitive::kPrimInt: {
3088 locations->SetInAt(0, Location::RequiresRegister());
3089 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003090 if (mul->InputAt(1)->IsIntConstant()) {
3091 // Can use 3 operand multiply.
3092 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3093 } else {
3094 locations->SetOut(Location::SameAsFirstInput());
3095 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003096 break;
3097 }
3098 case Primitive::kPrimLong: {
3099 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003100 locations->SetInAt(1, Location::Any());
3101 if (mul->InputAt(1)->IsLongConstant() &&
3102 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003103 // Can use 3 operand multiply.
3104 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3105 } else {
3106 locations->SetOut(Location::SameAsFirstInput());
3107 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003108 break;
3109 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003110 case Primitive::kPrimFloat:
3111 case Primitive::kPrimDouble: {
3112 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003113 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003114 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003115 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003116 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003117
3118 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003119 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003120 }
3121}
3122
3123void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3124 LocationSummary* locations = mul->GetLocations();
3125 Location first = locations->InAt(0);
3126 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003127 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003128 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003129 case Primitive::kPrimInt:
3130 // The constant may have ended up in a register, so test explicitly to avoid
3131 // problems where the output may not be the same as the first operand.
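      // The three-operand form of imul (dst, src, imm32) writes straight to its destination
      // register, which is why the locations above could use kNoOutputOverlap when the
      // right-hand side is an integer constant.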
3132 if (mul->InputAt(1)->IsIntConstant()) {
3133 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3134 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3135 } else if (second.IsRegister()) {
3136 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003137 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003138 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003139 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003140 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003141 __ imull(first.AsRegister<CpuRegister>(),
3142 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003143 }
3144 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003145 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003146 // The constant may have ended up in a register, so test explicitly to avoid
3147 // problems where the output may not be the same as the first operand.
3148 if (mul->InputAt(1)->IsLongConstant()) {
3149 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3150 if (IsInt<32>(value)) {
3151 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3152 Immediate(static_cast<int32_t>(value)));
3153 } else {
3154 // Have to use the constant area.
3155 DCHECK(first.Equals(out));
3156 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3157 }
3158 } else if (second.IsRegister()) {
3159 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003160 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003161 } else {
3162 DCHECK(second.IsDoubleStackSlot());
3163 DCHECK(first.Equals(out));
3164 __ imulq(first.AsRegister<CpuRegister>(),
3165 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003166 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003167 break;
3168 }
3169
Calin Juravleb5bfa962014-10-21 18:02:24 +01003170 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003171 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003172 if (second.IsFpuRegister()) {
3173 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3174 } else if (second.IsConstant()) {
3175 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003176 codegen_->LiteralFloatAddress(
3177 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003178 } else {
3179 DCHECK(second.IsStackSlot());
3180 __ mulss(first.AsFpuRegister<XmmRegister>(),
3181 Address(CpuRegister(RSP), second.GetStackIndex()));
3182 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003183 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003184 }
3185
3186 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003187 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003188 if (second.IsFpuRegister()) {
3189 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3190 } else if (second.IsConstant()) {
3191 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003192 codegen_->LiteralDoubleAddress(
3193 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003194 } else {
3195 DCHECK(second.IsDoubleStackSlot());
3196 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3197 Address(CpuRegister(RSP), second.GetStackIndex()));
3198 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003199 break;
3200 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003201
3202 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003203 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003204 }
3205}
3206
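// Helper for GenerateRemFP below: pushes a value onto the x87 FP stack. Stack-slot inputs are
// loaded directly with flds/fldl; register and constant inputs are first spilled to a stack
// temporary, since there is no instruction that moves data between the SSE registers and the
// x87 stack.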
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003207void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3208 uint32_t stack_adjustment, bool is_float) {
3209 if (source.IsStackSlot()) {
3210 DCHECK(is_float);
3211 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3212 } else if (source.IsDoubleStackSlot()) {
3213 DCHECK(!is_float);
3214 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3215 } else {
 3216 // Write the value to the temporary location on the stack and load it onto the FP stack.
3217 if (is_float) {
3218 Location stack_temp = Location::StackSlot(temp_offset);
3219 codegen_->Move(stack_temp, source);
3220 __ flds(Address(CpuRegister(RSP), temp_offset));
3221 } else {
3222 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3223 codegen_->Move(stack_temp, source);
3224 __ fldl(Address(CpuRegister(RSP), temp_offset));
3225 }
3226 }
3227}
3228
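// Float/double remainder is implemented with the legacy x87 fprem instruction, as SSE has no
// remainder operation. fprem only performs a partial reduction when the operand exponents are
// far apart, so it is run in a loop until the C2 bit (bit 10, kC2ConditionMask) of the FPU
// status word reports that the reduction is complete. fprem truncates the implied quotient
// toward zero, which matches the Java semantics of % on floating-point values (the result
// keeps the sign of the dividend).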
3229void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3230 Primitive::Type type = rem->GetResultType();
3231 bool is_float = type == Primitive::kPrimFloat;
3232 size_t elem_size = Primitive::ComponentSize(type);
3233 LocationSummary* locations = rem->GetLocations();
3234 Location first = locations->InAt(0);
3235 Location second = locations->InAt(1);
3236 Location out = locations->Out();
3237
3238 // Create stack space for 2 elements.
3239 // TODO: enhance register allocator to ask for stack temporaries.
3240 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3241
3242 // Load the values to the FP stack in reverse order, using temporaries if needed.
3243 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3244 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3245
3246 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003247 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003248 __ Bind(&retry);
3249 __ fprem();
3250
3251 // Move FP status to AX.
3252 __ fstsw();
3253
3254 // And see if the argument reduction is complete. This is signaled by the
3255 // C2 FPU flag bit set to 0.
3256 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3257 __ j(kNotEqual, &retry);
3258
3259 // We have settled on the final value. Retrieve it into an XMM register.
3260 // Store FP top of stack to real stack.
3261 if (is_float) {
3262 __ fsts(Address(CpuRegister(RSP), 0));
3263 } else {
3264 __ fstl(Address(CpuRegister(RSP), 0));
3265 }
3266
3267 // Pop the 2 items from the FP stack.
3268 __ fucompp();
3269
3270 // Load the value from the stack into an XMM register.
3271 DCHECK(out.IsFpuRegister()) << out;
3272 if (is_float) {
3273 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3274 } else {
3275 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3276 }
3277
3278 // And remove the temporary stack space we allocated.
3279 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3280}
3281
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003282void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3283 DCHECK(instruction->IsDiv() || instruction->IsRem());
3284
3285 LocationSummary* locations = instruction->GetLocations();
3286 Location second = locations->InAt(1);
3287 DCHECK(second.IsConstant());
3288
3289 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3290 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003291 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003292
3293 DCHECK(imm == 1 || imm == -1);
3294
3295 switch (instruction->GetResultType()) {
3296 case Primitive::kPrimInt: {
3297 if (instruction->IsRem()) {
3298 __ xorl(output_register, output_register);
3299 } else {
3300 __ movl(output_register, input_register);
3301 if (imm == -1) {
3302 __ negl(output_register);
3303 }
3304 }
3305 break;
3306 }
3307
3308 case Primitive::kPrimLong: {
3309 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003310 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003311 } else {
3312 __ movq(output_register, input_register);
3313 if (imm == -1) {
3314 __ negq(output_register);
3315 }
3316 }
3317 break;
3318 }
3319
3320 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003321 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003322 }
3323}
3324
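// Signed division by +/-2^k cannot be a plain arithmetic shift, because sar rounds toward
// negative infinity while Java division truncates toward zero. The sequence below therefore
// adds a bias of (2^k - 1) to negative numerators only (lea + test + cmov), shifts
// arithmetically by k, and negates the result for a negative divisor. For example, with a
// divisor of 4 and a numerator of -7: (-7 + 3) >> 2 == -1, whereas -7 >> 2 alone would give -2.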
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003325void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003326 LocationSummary* locations = instruction->GetLocations();
3327 Location second = locations->InAt(1);
3328
3329 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3330 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3331
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003332 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003333 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3334 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003335
3336 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3337
3338 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003339 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003340 __ testl(numerator, numerator);
3341 __ cmov(kGreaterEqual, tmp, numerator);
3342 int shift = CTZ(imm);
3343 __ sarl(tmp, Immediate(shift));
3344
3345 if (imm < 0) {
3346 __ negl(tmp);
3347 }
3348
3349 __ movl(output_register, tmp);
3350 } else {
3351 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3352 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3353
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003354 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003355 __ addq(rdx, numerator);
3356 __ testq(numerator, numerator);
3357 __ cmov(kGreaterEqual, rdx, numerator);
3358 int shift = CTZ(imm);
3359 __ sarq(rdx, Immediate(shift));
3360
3361 if (imm < 0) {
3362 __ negq(rdx);
3363 }
3364
3365 __ movq(output_register, rdx);
3366 }
3367}
3368
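// Division/remainder by an arbitrary non-zero, non-power-of-two constant avoids idiv by
// multiplying with a precomputed fixed-point reciprocal (the "magic number" scheme of
// CalculateMagicAndShiftForDivRem, in the style of Granlund-Montgomery / Hacker's Delight).
// Roughly, for the 32-bit case:
//   q  = high32(magic * n);                  // signed widening multiply
//   if (imm > 0 && magic < 0) q += n; else if (imm < 0 && magic > 0) q -= n;
//   q >>= shift;                             // arithmetic shift
//   q += static_cast<uint32_t>(q) >> 31;     // add the sign bit to round toward zero
//   r  = n - q * imm;                        // only when a remainder is requested
// The 64-bit case is identical, using 64-bit operations and bit 63 as the sign.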
3369void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3370 DCHECK(instruction->IsDiv() || instruction->IsRem());
3371
3372 LocationSummary* locations = instruction->GetLocations();
3373 Location second = locations->InAt(1);
3374
3375 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3376 : locations->GetTemp(0).AsRegister<CpuRegister>();
3377 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3378 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3379 : locations->Out().AsRegister<CpuRegister>();
3380 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3381
3382 DCHECK_EQ(RAX, eax.AsRegister());
3383 DCHECK_EQ(RDX, edx.AsRegister());
3384 if (instruction->IsDiv()) {
3385 DCHECK_EQ(RAX, out.AsRegister());
3386 } else {
3387 DCHECK_EQ(RDX, out.AsRegister());
3388 }
3389
3390 int64_t magic;
3391 int shift;
3392
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003393 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003394 if (instruction->GetResultType() == Primitive::kPrimInt) {
3395 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3396
3397 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3398
3399 __ movl(numerator, eax);
3400
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003401 __ movl(eax, Immediate(magic));
3402 __ imull(numerator);
3403
3404 if (imm > 0 && magic < 0) {
3405 __ addl(edx, numerator);
3406 } else if (imm < 0 && magic > 0) {
3407 __ subl(edx, numerator);
3408 }
3409
3410 if (shift != 0) {
3411 __ sarl(edx, Immediate(shift));
3412 }
3413
3414 __ movl(eax, edx);
3415 __ shrl(edx, Immediate(31));
3416 __ addl(edx, eax);
3417
3418 if (instruction->IsRem()) {
3419 __ movl(eax, numerator);
3420 __ imull(edx, Immediate(imm));
3421 __ subl(eax, edx);
3422 __ movl(edx, eax);
3423 } else {
3424 __ movl(eax, edx);
3425 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003426 } else {
3427 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3428
3429 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3430
3431 CpuRegister rax = eax;
3432 CpuRegister rdx = edx;
3433
3434 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3435
3436 // Save the numerator.
3437 __ movq(numerator, rax);
3438
3439 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003440 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003441
3442 // RDX:RAX = magic * numerator
3443 __ imulq(numerator);
3444
3445 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003446 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003447 __ addq(rdx, numerator);
3448 } else if (imm < 0 && magic > 0) {
3449 // RDX -= numerator
3450 __ subq(rdx, numerator);
3451 }
3452
3453 // Shift if needed.
3454 if (shift != 0) {
3455 __ sarq(rdx, Immediate(shift));
3456 }
3457
3458 // RDX += 1 if RDX < 0
3459 __ movq(rax, rdx);
3460 __ shrq(rdx, Immediate(63));
3461 __ addq(rdx, rax);
3462
3463 if (instruction->IsRem()) {
3464 __ movq(rax, numerator);
3465
3466 if (IsInt<32>(imm)) {
3467 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3468 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003469 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003470 }
3471
3472 __ subq(rax, rdx);
3473 __ movq(rdx, rax);
3474 } else {
3475 __ movq(rax, rdx);
3476 }
3477 }
3478}
3479
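// Integer div/rem dispatch. Constant divisors are strength-reduced: 0 generates nothing (a
// DivZeroCheck precedes it), +/-1 becomes a move/negation, powers of two use the biased shift
// above, and everything else goes through the magic-number multiply. Non-constant divisors
// use idiv, which wants the dividend sign-extended into RDX:RAX (cdq/cqo) and needs a slow
// path for a divisor of -1, since INT_MIN / -1 (or LONG_MIN / -1) overflows and makes idiv
// raise a divide-error fault.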
Calin Juravlebacfec32014-11-14 15:54:36 +00003480void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3481 DCHECK(instruction->IsDiv() || instruction->IsRem());
3482 Primitive::Type type = instruction->GetResultType();
 3483 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3484
3485 bool is_div = instruction->IsDiv();
3486 LocationSummary* locations = instruction->GetLocations();
3487
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003488 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3489 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003490
Roland Levillain271ab9c2014-11-27 15:23:57 +00003491 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003492 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003493
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003494 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003495 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003496
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003497 if (imm == 0) {
 3498 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3499 } else if (imm == 1 || imm == -1) {
3500 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003501 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003502 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003503 } else {
3504 DCHECK(imm <= -2 || imm >= 2);
3505 GenerateDivRemWithAnyConstant(instruction);
3506 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003507 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003508 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003509 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003510 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003511 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003512
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003513 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3514 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
 3515 // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000),
 3516 // so it's safe to just use negl/negq instead of more complex comparisons.
3517 if (type == Primitive::kPrimInt) {
3518 __ cmpl(second_reg, Immediate(-1));
3519 __ j(kEqual, slow_path->GetEntryLabel());
3520 // edx:eax <- sign-extended of eax
3521 __ cdq();
3522 // eax = quotient, edx = remainder
3523 __ idivl(second_reg);
3524 } else {
3525 __ cmpq(second_reg, Immediate(-1));
3526 __ j(kEqual, slow_path->GetEntryLabel());
3527 // rdx:rax <- sign-extended of rax
3528 __ cqo();
3529 // rax = quotient, rdx = remainder
3530 __ idivq(second_reg);
3531 }
3532 __ Bind(slow_path->GetExitLabel());
3533 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003534}
3535
Calin Juravle7c4954d2014-10-28 16:57:40 +00003536void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3537 LocationSummary* locations =
3538 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3539 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003540 case Primitive::kPrimInt:
3541 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003542 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003543 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003544 locations->SetOut(Location::SameAsFirstInput());
3545 // Intel uses edx:eax as the dividend.
3546 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003547 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3548 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3549 // output and request another temp.
3550 if (div->InputAt(1)->IsConstant()) {
3551 locations->AddTemp(Location::RequiresRegister());
3552 }
Calin Juravled0d48522014-11-04 16:40:20 +00003553 break;
3554 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003555
Calin Juravle7c4954d2014-10-28 16:57:40 +00003556 case Primitive::kPrimFloat:
3557 case Primitive::kPrimDouble: {
3558 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003559 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003560 locations->SetOut(Location::SameAsFirstInput());
3561 break;
3562 }
3563
3564 default:
3565 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3566 }
3567}
3568
3569void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3570 LocationSummary* locations = div->GetLocations();
3571 Location first = locations->InAt(0);
3572 Location second = locations->InAt(1);
3573 DCHECK(first.Equals(locations->Out()));
3574
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003575 Primitive::Type type = div->GetResultType();
3576 switch (type) {
3577 case Primitive::kPrimInt:
3578 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003579 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003580 break;
3581 }
3582
Calin Juravle7c4954d2014-10-28 16:57:40 +00003583 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003584 if (second.IsFpuRegister()) {
3585 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3586 } else if (second.IsConstant()) {
3587 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003588 codegen_->LiteralFloatAddress(
3589 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003590 } else {
3591 DCHECK(second.IsStackSlot());
3592 __ divss(first.AsFpuRegister<XmmRegister>(),
3593 Address(CpuRegister(RSP), second.GetStackIndex()));
3594 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003595 break;
3596 }
3597
3598 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003599 if (second.IsFpuRegister()) {
3600 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3601 } else if (second.IsConstant()) {
3602 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003603 codegen_->LiteralDoubleAddress(
3604 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003605 } else {
3606 DCHECK(second.IsDoubleStackSlot());
3607 __ divsd(first.AsFpuRegister<XmmRegister>(),
3608 Address(CpuRegister(RSP), second.GetStackIndex()));
3609 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003610 break;
3611 }
3612
3613 default:
3614 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3615 }
3616}
3617
Calin Juravlebacfec32014-11-14 15:54:36 +00003618void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003619 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003620 LocationSummary* locations =
3621 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003622
3623 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003624 case Primitive::kPrimInt:
3625 case Primitive::kPrimLong: {
3626 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003627 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003628 // Intel uses rdx:rax as the dividend and puts the remainder in rdx.
3629 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003630 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3631 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3632 // output and request another temp.
3633 if (rem->InputAt(1)->IsConstant()) {
3634 locations->AddTemp(Location::RequiresRegister());
3635 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003636 break;
3637 }
3638
3639 case Primitive::kPrimFloat:
3640 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003641 locations->SetInAt(0, Location::Any());
3642 locations->SetInAt(1, Location::Any());
3643 locations->SetOut(Location::RequiresFpuRegister());
3644 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003645 break;
3646 }
3647
3648 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003649 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003650 }
3651}
3652
3653void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3654 Primitive::Type type = rem->GetResultType();
3655 switch (type) {
3656 case Primitive::kPrimInt:
3657 case Primitive::kPrimLong: {
3658 GenerateDivRemIntegral(rem);
3659 break;
3660 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003661 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003662 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003663 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003664 break;
3665 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003666 default:
3667 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3668 }
3669}
3670
Calin Juravled0d48522014-11-04 16:40:20 +00003671void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003672 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3673 ? LocationSummary::kCallOnSlowPath
3674 : LocationSummary::kNoCall;
3675 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003676 locations->SetInAt(0, Location::Any());
3677 if (instruction->HasUses()) {
3678 locations->SetOut(Location::SameAsFirstInput());
3679 }
3680}
3681
3682void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003683 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003684 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3685 codegen_->AddSlowPath(slow_path);
3686
3687 LocationSummary* locations = instruction->GetLocations();
3688 Location value = locations->InAt(0);
3689
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003690 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003691 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003692 case Primitive::kPrimByte:
3693 case Primitive::kPrimChar:
3694 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003695 case Primitive::kPrimInt: {
3696 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003697 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003698 __ j(kEqual, slow_path->GetEntryLabel());
3699 } else if (value.IsStackSlot()) {
3700 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3701 __ j(kEqual, slow_path->GetEntryLabel());
3702 } else {
3703 DCHECK(value.IsConstant()) << value;
3704 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3705 __ jmp(slow_path->GetEntryLabel());
3706 }
3707 }
3708 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003709 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003710 case Primitive::kPrimLong: {
3711 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003712 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003713 __ j(kEqual, slow_path->GetEntryLabel());
3714 } else if (value.IsDoubleStackSlot()) {
3715 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3716 __ j(kEqual, slow_path->GetEntryLabel());
3717 } else {
3718 DCHECK(value.IsConstant()) << value;
3719 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3720 __ jmp(slow_path->GetEntryLabel());
3721 }
3722 }
3723 break;
3724 }
3725 default:
3726 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003727 }
Calin Juravled0d48522014-11-04 16:40:20 +00003728}
3729
Calin Juravle9aec02f2014-11-18 23:06:35 +00003730void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3731 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3732
3733 LocationSummary* locations =
3734 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3735
3736 switch (op->GetResultType()) {
3737 case Primitive::kPrimInt:
3738 case Primitive::kPrimLong: {
3739 locations->SetInAt(0, Location::RequiresRegister());
3740 // The shift count needs to be in CL.
3741 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3742 locations->SetOut(Location::SameAsFirstInput());
3743 break;
3744 }
3745 default:
3746 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3747 }
3748}
3749
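// x86-64 only accepts a variable shift count in CL, hence the fixed RCX in the locations
// above. Constant counts are masked with kMaxIntShiftDistance/kMaxLongShiftDistance
// (presumably 31 and 63), which matches both what the hardware does with the count and the
// Java rules for shift distances.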
3750void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3751 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3752
3753 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003754 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003755 Location second = locations->InAt(1);
3756
3757 switch (op->GetResultType()) {
3758 case Primitive::kPrimInt: {
3759 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003760 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003761 if (op->IsShl()) {
3762 __ shll(first_reg, second_reg);
3763 } else if (op->IsShr()) {
3764 __ sarl(first_reg, second_reg);
3765 } else {
3766 __ shrl(first_reg, second_reg);
3767 }
3768 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003769 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003770 if (op->IsShl()) {
3771 __ shll(first_reg, imm);
3772 } else if (op->IsShr()) {
3773 __ sarl(first_reg, imm);
3774 } else {
3775 __ shrl(first_reg, imm);
3776 }
3777 }
3778 break;
3779 }
3780 case Primitive::kPrimLong: {
3781 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003782 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003783 if (op->IsShl()) {
3784 __ shlq(first_reg, second_reg);
3785 } else if (op->IsShr()) {
3786 __ sarq(first_reg, second_reg);
3787 } else {
3788 __ shrq(first_reg, second_reg);
3789 }
3790 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003791 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003792 if (op->IsShl()) {
3793 __ shlq(first_reg, imm);
3794 } else if (op->IsShr()) {
3795 __ sarq(first_reg, imm);
3796 } else {
3797 __ shrq(first_reg, imm);
3798 }
3799 }
3800 break;
3801 }
3802 default:
3803 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003804 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003805 }
3806}
3807
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003808void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3809 LocationSummary* locations =
3810 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3811
3812 switch (ror->GetResultType()) {
3813 case Primitive::kPrimInt:
3814 case Primitive::kPrimLong: {
3815 locations->SetInAt(0, Location::RequiresRegister());
3816 // The shift count needs to be in CL (unless it is a constant).
3817 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3818 locations->SetOut(Location::SameAsFirstInput());
3819 break;
3820 }
3821 default:
3822 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3823 UNREACHABLE();
3824 }
3825}
3826
3827void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3828 LocationSummary* locations = ror->GetLocations();
3829 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3830 Location second = locations->InAt(1);
3831
3832 switch (ror->GetResultType()) {
3833 case Primitive::kPrimInt:
3834 if (second.IsRegister()) {
3835 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3836 __ rorl(first_reg, second_reg);
3837 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003838 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003839 __ rorl(first_reg, imm);
3840 }
3841 break;
3842 case Primitive::kPrimLong:
3843 if (second.IsRegister()) {
3844 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3845 __ rorq(first_reg, second_reg);
3846 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003847 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003848 __ rorq(first_reg, imm);
3849 }
3850 break;
3851 default:
3852 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3853 UNREACHABLE();
3854 }
3855}
3856
Calin Juravle9aec02f2014-11-18 23:06:35 +00003857void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3858 HandleShift(shl);
3859}
3860
3861void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3862 HandleShift(shl);
3863}
3864
3865void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3866 HandleShift(shr);
3867}
3868
3869void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3870 HandleShift(shr);
3871}
3872
3873void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3874 HandleShift(ushr);
3875}
3876
3877void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3878 HandleShift(ushr);
3879}
3880
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003881void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003882 LocationSummary* locations =
3883 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003884 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003885 if (instruction->IsStringAlloc()) {
3886 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3887 } else {
3888 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3889 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3890 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003891 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003892}
3893
3894void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003895 // Note: if heap poisoning is enabled, the entry point takes care
3896 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003897 if (instruction->IsStringAlloc()) {
3898 // String is allocated through StringFactory. Call NewEmptyString entry point.
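    // The pNewEmptyString entry point slot in the Thread object (reached through the GS
    // segment on x86-64) holds the StringFactory method to invoke; the code below loads that
    // method into a temp and calls its entry point from quick compiled code.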
3899 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3900 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3901 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3902 __ call(Address(temp, code_offset.SizeValue()));
3903 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3904 } else {
3905 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3906 instruction,
3907 instruction->GetDexPc(),
3908 nullptr);
3909 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3910 DCHECK(!codegen_->IsLeafMethod());
3911 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003912}
3913
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003914void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3915 LocationSummary* locations =
3916 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3917 InvokeRuntimeCallingConvention calling_convention;
3918 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003919 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003920 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003921 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003922}
3923
3924void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3925 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003926 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3927 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003928 // Note: if heap poisoning is enabled, the entry point takes care
3929 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003930 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3931 instruction,
3932 instruction->GetDexPc(),
3933 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003934 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003935
3936 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003937}
3938
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003939void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003940 LocationSummary* locations =
3941 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003942 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3943 if (location.IsStackSlot()) {
3944 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3945 } else if (location.IsDoubleStackSlot()) {
3946 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3947 }
3948 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003949}
3950
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003951void InstructionCodeGeneratorX86_64::VisitParameterValue(
3952 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003953 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003954}
3955
3956void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3957 LocationSummary* locations =
3958 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3959 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3960}
3961
3962void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
3963 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3964 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003965}
3966
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003967void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3968 LocationSummary* locations =
3969 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3970 locations->SetInAt(0, Location::RequiresRegister());
3971 locations->SetOut(Location::RequiresRegister());
3972}
3973
3974void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3975 LocationSummary* locations = instruction->GetLocations();
3976 uint32_t method_offset = 0;
Vladimir Markoa1de9182016-02-25 11:37:38 +00003977 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003978 method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3979 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
3980 } else {
3981 method_offset = mirror::Class::EmbeddedImTableEntryOffset(
3982 instruction->GetIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
3983 }
3984 __ movq(locations->Out().AsRegister<CpuRegister>(),
3985 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
3986}
3987
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003988void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003989 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003990 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003991 locations->SetInAt(0, Location::RequiresRegister());
3992 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003993}
3994
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003995void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
3996 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003997 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3998 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003999 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004000 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004001 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004002 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004003 break;
4004
4005 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004006 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004007 break;
4008
4009 default:
4010 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4011 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004012}
4013
David Brazdil66d126e2015-04-03 16:02:44 +01004014void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4015 LocationSummary* locations =
4016 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4017 locations->SetInAt(0, Location::RequiresRegister());
4018 locations->SetOut(Location::SameAsFirstInput());
4019}
4020
4021void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004022 LocationSummary* locations = bool_not->GetLocations();
4023 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4024 locations->Out().AsRegister<CpuRegister>().AsRegister());
4025 Location out = locations->Out();
4026 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4027}
4028
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004029void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004030 LocationSummary* locations =
4031 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004032 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004033 locations->SetInAt(i, Location::Any());
4034 }
4035 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004036}
4037
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004038void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004039 LOG(FATAL) << "Unimplemented";
4040}
4041
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004042void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004043 /*
 4044 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004045 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004046 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4047 */
4048 switch (kind) {
4049 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004050 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004051 break;
4052 }
4053 case MemBarrierKind::kAnyStore:
4054 case MemBarrierKind::kLoadAny:
4055 case MemBarrierKind::kStoreStore: {
4056 // nop
4057 break;
4058 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004059 case MemBarrierKind::kNTStoreStore:
4060 // Non-Temporal Store/Store needs an explicit fence.
4061 MemoryFence(/* non-temporal */ true);
4062 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004063 }
4064}
4065
4066void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4067 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4068
Roland Levillain0d5a2812015-11-13 10:07:31 +00004069 bool object_field_get_with_read_barrier =
4070 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004071 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004072 new (GetGraph()->GetArena()) LocationSummary(instruction,
4073 object_field_get_with_read_barrier ?
4074 LocationSummary::kCallOnSlowPath :
4075 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004076 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004077 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4078 locations->SetOut(Location::RequiresFpuRegister());
4079 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004080 // The output overlaps for an object field get when read barriers
4081 // are enabled: we do not want the move to overwrite the object's
4082 // location, as we need it to emit the read barrier.
4083 locations->SetOut(
4084 Location::RequiresRegister(),
4085 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004086 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004087 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4088 // We need a temporary register for the read barrier marking slow
4089 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4090 locations->AddTemp(Location::RequiresRegister());
4091 }
Calin Juravle52c48962014-12-16 17:02:57 +00004092}
4093
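// Field loads are sized by the field type. Reference loads additionally go through the Baker
// read barrier fast path when read barriers are enabled, which is why the locations above
// mark the output as overlapping and, for the Baker case, reserve a temp. For volatile fields
// only a LoadAny barrier is emitted after the load; as GenerateMemoryBarrier above shows,
// that is a no-op on x86-64 beyond acting as a scheduling point.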
4094void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4095 const FieldInfo& field_info) {
4096 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4097
4098 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004099 Location base_loc = locations->InAt(0);
4100 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004101 Location out = locations->Out();
4102 bool is_volatile = field_info.IsVolatile();
4103 Primitive::Type field_type = field_info.GetFieldType();
4104 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4105
4106 switch (field_type) {
4107 case Primitive::kPrimBoolean: {
4108 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4109 break;
4110 }
4111
4112 case Primitive::kPrimByte: {
4113 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4114 break;
4115 }
4116
4117 case Primitive::kPrimShort: {
4118 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4119 break;
4120 }
4121
4122 case Primitive::kPrimChar: {
4123 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4124 break;
4125 }
4126
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004127 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004128 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4129 break;
4130 }
4131
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004132 case Primitive::kPrimNot: {
4133 // /* HeapReference<Object> */ out = *(base + offset)
4134 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4135 Location temp_loc = locations->GetTemp(0);
4136 // Note that a potential implicit null check is handled in this
 4137 // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4138 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4139 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4140 if (is_volatile) {
4141 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4142 }
4143 } else {
4144 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4145 codegen_->MaybeRecordImplicitNullCheck(instruction);
4146 if (is_volatile) {
4147 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4148 }
4149 // If read barriers are enabled, emit read barriers other than
4150 // Baker's using a slow path (and also unpoison the loaded
4151 // reference, if heap poisoning is enabled).
4152 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4153 }
4154 break;
4155 }
4156
Calin Juravle52c48962014-12-16 17:02:57 +00004157 case Primitive::kPrimLong: {
4158 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4159 break;
4160 }
4161
4162 case Primitive::kPrimFloat: {
4163 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4164 break;
4165 }
4166
4167 case Primitive::kPrimDouble: {
4168 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4169 break;
4170 }
4171
4172 case Primitive::kPrimVoid:
4173 LOG(FATAL) << "Unreachable type " << field_type;
4174 UNREACHABLE();
4175 }
4176
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004177 if (field_type == Primitive::kPrimNot) {
4178 // Potential implicit null checks, in the case of reference
4179 // fields, are handled in the previous switch statement.
4180 } else {
4181 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004182 }
Roland Levillain4d027112015-07-01 15:41:14 +01004183
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004184 if (is_volatile) {
4185 if (field_type == Primitive::kPrimNot) {
4186 // Memory barriers, in the case of references, are also handled
4187 // in the previous switch statement.
4188 } else {
4189 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4190 }
Roland Levillain4d027112015-07-01 15:41:14 +01004191 }
Calin Juravle52c48962014-12-16 17:02:57 +00004192}
4193
4194void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4195 const FieldInfo& field_info) {
4196 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4197
4198 LocationSummary* locations =
4199 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004200 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004201 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004202 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004203 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004204
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004205 locations->SetInAt(0, Location::RequiresRegister());
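  // For volatile fields the store must be a single instruction: a 64-bit immediate cannot be
  // stored with one mov (the memory form only takes a sign-extended 32-bit immediate), and
  // MoveInt64ToAddress would otherwise split it into two 32-bit stores, breaking atomicity.
  // Hence the *Int32Constant restrictions below when is_volatile is set.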
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004206 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004207 if (is_volatile) {
4208 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4209 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4210 } else {
4211 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4212 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004213 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004214 if (is_volatile) {
4215 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4216 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4217 } else {
4218 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4219 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004220 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004221 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004222 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004223 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004224 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004225 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4226 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004227 locations->AddTemp(Location::RequiresRegister());
4228 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004229}
4230
Calin Juravle52c48962014-12-16 17:02:57 +00004231void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004232 const FieldInfo& field_info,
4233 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004234 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4235
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004236 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004237 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4238 Location value = locations->InAt(1);
4239 bool is_volatile = field_info.IsVolatile();
4240 Primitive::Type field_type = field_info.GetFieldType();
4241 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4242
4243 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004244 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004245 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004246
Mark Mendellea5af682015-10-22 17:35:49 -04004247 bool maybe_record_implicit_null_check_done = false;
4248
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004249 switch (field_type) {
4250 case Primitive::kPrimBoolean:
4251 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004252 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004253 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004254 __ movb(Address(base, offset), Immediate(v));
4255 } else {
4256 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4257 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004258 break;
4259 }
4260
4261 case Primitive::kPrimShort:
4262 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004263 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004264 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004265 __ movw(Address(base, offset), Immediate(v));
4266 } else {
4267 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4268 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004269 break;
4270 }
4271
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004272 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004273 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004274 if (value.IsConstant()) {
4275 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004276 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4277 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4278 // Note: if heap poisoning is enabled, no need to poison
4279 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004280 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004281 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004282 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4283 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4284 __ movl(temp, value.AsRegister<CpuRegister>());
4285 __ PoisonHeapReference(temp);
4286 __ movl(Address(base, offset), temp);
4287 } else {
4288 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4289 }
Mark Mendell40741f32015-04-20 22:10:34 -04004290 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004291 break;
4292 }
4293
4294 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004295 if (value.IsConstant()) {
4296 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004297 codegen_->MoveInt64ToAddress(Address(base, offset),
4298 Address(base, offset + sizeof(int32_t)),
4299 v,
4300 instruction);
4301 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004302 } else {
4303 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4304 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004305 break;
4306 }
4307
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004308 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004309 if (value.IsConstant()) {
4310 int32_t v =
4311 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4312 __ movl(Address(base, offset), Immediate(v));
4313 } else {
4314 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4315 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004316 break;
4317 }
4318
4319 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004320 if (value.IsConstant()) {
4321 int64_t v =
4322 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4323 codegen_->MoveInt64ToAddress(Address(base, offset),
4324 Address(base, offset + sizeof(int32_t)),
4325 v,
4326 instruction);
4327 maybe_record_implicit_null_check_done = true;
4328 } else {
4329 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4330 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004331 break;
4332 }
4333
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004334 case Primitive::kPrimVoid:
4335 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004336 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004337 }
Calin Juravle52c48962014-12-16 17:02:57 +00004338
Mark Mendellea5af682015-10-22 17:35:49 -04004339 if (!maybe_record_implicit_null_check_done) {
4340 codegen_->MaybeRecordImplicitNullCheck(instruction);
4341 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004342
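  // Reference stores into the heap need a card-marking write barrier so the
  // garbage collector can find the updated reference later.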
4343 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4344 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4345 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004346 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004347 }
4348
Calin Juravle52c48962014-12-16 17:02:57 +00004349 if (is_volatile) {
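    // A volatile store must be followed by a barrier with StoreLoad semantics;
    // kAnyAny provides that ordering.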
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004350 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004351 }
4352}
4353
4354void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4355 HandleFieldSet(instruction, instruction->GetFieldInfo());
4356}
4357
4358void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004359 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004360}
4361
4362void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004363 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004364}
4365
4366void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004367 HandleFieldGet(instruction, instruction->GetFieldInfo());
4368}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004369
Calin Juravle52c48962014-12-16 17:02:57 +00004370void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4371 HandleFieldGet(instruction);
4372}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004373
Calin Juravle52c48962014-12-16 17:02:57 +00004374void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4375 HandleFieldGet(instruction, instruction->GetFieldInfo());
4376}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004377
Calin Juravle52c48962014-12-16 17:02:57 +00004378void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4379 HandleFieldSet(instruction, instruction->GetFieldInfo());
4380}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004381
Calin Juravle52c48962014-12-16 17:02:57 +00004382void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004383 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004384}
4385
Calin Juravlee460d1d2015-09-29 04:52:17 +01004386void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4387 HUnresolvedInstanceFieldGet* instruction) {
4388 FieldAccessCallingConventionX86_64 calling_convention;
4389 codegen_->CreateUnresolvedFieldLocationSummary(
4390 instruction, instruction->GetFieldType(), calling_convention);
4391}
4392
4393void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4394 HUnresolvedInstanceFieldGet* instruction) {
4395 FieldAccessCallingConventionX86_64 calling_convention;
4396 codegen_->GenerateUnresolvedFieldAccess(instruction,
4397 instruction->GetFieldType(),
4398 instruction->GetFieldIndex(),
4399 instruction->GetDexPc(),
4400 calling_convention);
4401}
4402
4403void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4404 HUnresolvedInstanceFieldSet* instruction) {
4405 FieldAccessCallingConventionX86_64 calling_convention;
4406 codegen_->CreateUnresolvedFieldLocationSummary(
4407 instruction, instruction->GetFieldType(), calling_convention);
4408}
4409
4410void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4411 HUnresolvedInstanceFieldSet* instruction) {
4412 FieldAccessCallingConventionX86_64 calling_convention;
4413 codegen_->GenerateUnresolvedFieldAccess(instruction,
4414 instruction->GetFieldType(),
4415 instruction->GetFieldIndex(),
4416 instruction->GetDexPc(),
4417 calling_convention);
4418}
4419
4420void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4421 HUnresolvedStaticFieldGet* instruction) {
4422 FieldAccessCallingConventionX86_64 calling_convention;
4423 codegen_->CreateUnresolvedFieldLocationSummary(
4424 instruction, instruction->GetFieldType(), calling_convention);
4425}
4426
4427void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4428 HUnresolvedStaticFieldGet* instruction) {
4429 FieldAccessCallingConventionX86_64 calling_convention;
4430 codegen_->GenerateUnresolvedFieldAccess(instruction,
4431 instruction->GetFieldType(),
4432 instruction->GetFieldIndex(),
4433 instruction->GetDexPc(),
4434 calling_convention);
4435}
4436
4437void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4438 HUnresolvedStaticFieldSet* instruction) {
4439 FieldAccessCallingConventionX86_64 calling_convention;
4440 codegen_->CreateUnresolvedFieldLocationSummary(
4441 instruction, instruction->GetFieldType(), calling_convention);
4442}
4443
4444void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4445 HUnresolvedStaticFieldSet* instruction) {
4446 FieldAccessCallingConventionX86_64 calling_convention;
4447 codegen_->GenerateUnresolvedFieldAccess(instruction,
4448 instruction->GetFieldType(),
4449 instruction->GetFieldIndex(),
4450 instruction->GetDexPc(),
4451 calling_convention);
4452}
4453
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004454void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004455 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4456 ? LocationSummary::kCallOnSlowPath
4457 : LocationSummary::kNoCall;
4458 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4459 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004460 ? Location::RequiresRegister()
4461 : Location::Any();
4462 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004463 if (instruction->HasUses()) {
4464 locations->SetOut(Location::SameAsFirstInput());
4465 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004466}
4467
Calin Juravle2ae48182016-03-16 14:05:09 +00004468void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4469 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004470 return;
4471 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004472 LocationSummary* locations = instruction->GetLocations();
4473 Location obj = locations->InAt(0);
4474
4475 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004476 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004477}
4478
Calin Juravle2ae48182016-03-16 14:05:09 +00004479void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004480 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004481 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004482
4483 LocationSummary* locations = instruction->GetLocations();
4484 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004485
4486 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004487 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004488 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004489 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004490 } else {
4491 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004492 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004493 __ jmp(slow_path->GetEntryLabel());
4494 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004495 }
4496 __ j(kEqual, slow_path->GetEntryLabel());
4497}
4498
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004499void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004500 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004501}
4502
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004503void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004504 bool object_array_get_with_read_barrier =
4505 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004506 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004507 new (GetGraph()->GetArena()) LocationSummary(instruction,
4508 object_array_get_with_read_barrier ?
4509 LocationSummary::kCallOnSlowPath :
4510 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004511 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004512 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004513 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4514 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4515 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004516 // The output overlaps for an object array get when read barriers
4517 // are enabled: we do not want the move to overwrite the array's
4518 // location, as we need it to emit the read barrier.
4519 locations->SetOut(
4520 Location::RequiresRegister(),
4521 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004522 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004523 // We need a temporary register for the read barrier marking slow
4524 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4525 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4526 locations->AddTemp(Location::RequiresRegister());
4527 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004528}
4529
4530void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4531 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004532 Location obj_loc = locations->InAt(0);
4533 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004534 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004535 Location out_loc = locations->Out();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004536
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004537 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004538 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004539 case Primitive::kPrimBoolean: {
4540 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004541 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004542 if (index.IsConstant()) {
4543 __ movzxb(out, Address(obj,
4544 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4545 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004546 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004547 }
4548 break;
4549 }
4550
4551 case Primitive::kPrimByte: {
4552 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004553 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004554 if (index.IsConstant()) {
4555 __ movsxb(out, Address(obj,
4556 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4557 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004558 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004559 }
4560 break;
4561 }
4562
4563 case Primitive::kPrimShort: {
4564 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004565 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004566 if (index.IsConstant()) {
4567 __ movsxw(out, Address(obj,
4568 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4569 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004570 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004571 }
4572 break;
4573 }
4574
4575 case Primitive::kPrimChar: {
4576 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004577 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004578 if (index.IsConstant()) {
4579 __ movzxw(out, Address(obj,
4580 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4581 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004582 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004583 }
4584 break;
4585 }
4586
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004587 case Primitive::kPrimInt: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004588 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004589 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004590 if (index.IsConstant()) {
4591 __ movl(out, Address(obj,
4592 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4593 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004594 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004595 }
4596 break;
4597 }
4598
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004599 case Primitive::kPrimNot: {
4600 static_assert(
4601 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4602 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4603 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4604 // /* HeapReference<Object> */ out =
4605 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4606 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4607 Location temp = locations->GetTemp(0);
4608 // Note that a potential implicit null check is handled in this
4609 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4610 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4611 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4612 } else {
4613 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4614 if (index.IsConstant()) {
4615 uint32_t offset =
4616 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4617 __ movl(out, Address(obj, offset));
4618 codegen_->MaybeRecordImplicitNullCheck(instruction);
4619 // If read barriers are enabled, emit read barriers other than
4620 // Baker's using a slow path (and also unpoison the loaded
4621 // reference, if heap poisoning is enabled).
4622 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4623 } else {
4624 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4625 codegen_->MaybeRecordImplicitNullCheck(instruction);
4626 // If read barriers are enabled, emit read barriers other than
4627 // Baker's using a slow path (and also unpoison the loaded
4628 // reference, if heap poisoning is enabled).
4629 codegen_->MaybeGenerateReadBarrierSlow(
4630 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4631 }
4632 }
4633 break;
4634 }
4635
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004636 case Primitive::kPrimLong: {
4637 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004638 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004639 if (index.IsConstant()) {
4640 __ movq(out, Address(obj,
4641 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4642 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004643 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004644 }
4645 break;
4646 }
4647
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004648 case Primitive::kPrimFloat: {
4649 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004650 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004651 if (index.IsConstant()) {
4652 __ movss(out, Address(obj,
4653 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4654 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004655 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004656 }
4657 break;
4658 }
4659
4660 case Primitive::kPrimDouble: {
4661 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004662 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004663 if (index.IsConstant()) {
4664 __ movsd(out, Address(obj,
4665 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4666 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004667 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004668 }
4669 break;
4670 }
4671
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004672 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004673 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004674 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004675 }
Roland Levillain4d027112015-07-01 15:41:14 +01004676
4677 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004678 // Potential implicit null checks, in the case of reference
4679 // arrays, are handled in the previous switch statement.
4680 } else {
4681 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004682 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004683}
4684
4685void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004686 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004687
4688 bool needs_write_barrier =
4689 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004690 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004691 bool object_array_set_with_read_barrier =
4692 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004693
Nicolas Geoffray39468442014-09-02 15:17:15 +01004694 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004695 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004696 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004697 LocationSummary::kCallOnSlowPath :
4698 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004699
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004700 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004701 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4702 if (Primitive::IsFloatingPointType(value_type)) {
4703 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004704 } else {
4705 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4706 }
4707
4708 if (needs_write_barrier) {
4709 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004710
4711 // This first temporary register is possibly used for heap
4712 // reference poisoning and/or read barrier emission too.
4713 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004714 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004715 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004716}
4717
4718void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4719 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004720 Location array_loc = locations->InAt(0);
4721 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004722 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004723 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004724 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004725 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004726 bool needs_write_barrier =
4727 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004728 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4729 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4730 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004731
4732 switch (value_type) {
4733 case Primitive::kPrimBoolean:
4734 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004735 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4736 Address address = index.IsConstant()
4737 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4738 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4739 if (value.IsRegister()) {
4740 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004741 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004742 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004743 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004744 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004745 break;
4746 }
4747
4748 case Primitive::kPrimShort:
4749 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004750 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4751 Address address = index.IsConstant()
4752 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4753 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4754 if (value.IsRegister()) {
4755 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004756 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004757 DCHECK(value.IsConstant()) << value;
4758 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004759 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004760 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004761 break;
4762 }
4763
4764 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004765 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4766 Address address = index.IsConstant()
4767 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4768 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004769
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004770 if (!value.IsRegister()) {
4771 // Just setting null.
4772 DCHECK(instruction->InputAt(2)->IsNullConstant());
4773 DCHECK(value.IsConstant()) << value;
4774 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004775 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004776 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004777 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004778 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004779 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004780
4781 DCHECK(needs_write_barrier);
4782 CpuRegister register_value = value.AsRegister<CpuRegister>();
4783 NearLabel done, not_null, do_put;
4784 SlowPathCode* slow_path = nullptr;
4785 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004786 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004787 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4788 codegen_->AddSlowPath(slow_path);
4789 if (instruction->GetValueCanBeNull()) {
4790 __ testl(register_value, register_value);
4791 __ j(kNotEqual, &not_null);
4792 __ movl(address, Immediate(0));
4793 codegen_->MaybeRecordImplicitNullCheck(instruction);
4794 __ jmp(&done);
4795 __ Bind(&not_null);
4796 }
4797
Roland Levillain0d5a2812015-11-13 10:07:31 +00004798 if (kEmitCompilerReadBarrier) {
4799 // When read barriers are enabled, the type checking
4800 // instrumentation requires two read barriers:
4801 //
4802 // __ movl(temp2, temp);
4803 // // /* HeapReference<Class> */ temp = temp->component_type_
4804 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004805 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004806 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4807 //
4808 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4809 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004810 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004811 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4812 //
4813 // __ cmpl(temp, temp2);
4814 //
4815 // However, the second read barrier may trash `temp`, as it
4816 // is a temporary register, and as such would not be saved
4817 // along with live registers before calling the runtime (nor
4818 // restored afterwards). So in this case, we bail out and
4819 // delegate the work to the array set slow path.
4820 //
4821 // TODO: Extend the register allocator to support a new
4822 // "(locally) live temp" location so as to avoid always
4823 // going into the slow path when read barriers are enabled.
4824 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004825 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004826 // /* HeapReference<Class> */ temp = array->klass_
4827 __ movl(temp, Address(array, class_offset));
4828 codegen_->MaybeRecordImplicitNullCheck(instruction);
4829 __ MaybeUnpoisonHeapReference(temp);
4830
4831 // /* HeapReference<Class> */ temp = temp->component_type_
4832 __ movl(temp, Address(temp, component_offset));
4833 // If heap poisoning is enabled, no need to unpoison `temp`
4834 // nor the object reference in `register_value->klass`, as
4835 // we are comparing two poisoned references.
4836 __ cmpl(temp, Address(register_value, class_offset));
4837
4838 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4839 __ j(kEqual, &do_put);
4840 // If heap poisoning is enabled, the `temp` reference has
4841 // not been unpoisoned yet; unpoison it now.
4842 __ MaybeUnpoisonHeapReference(temp);
4843
4844 // /* HeapReference<Class> */ temp = temp->super_class_
4845 __ movl(temp, Address(temp, super_offset));
4846 // If heap poisoning is enabled, no need to unpoison
4847 // `temp`, as we are comparing against null below.
4848 __ testl(temp, temp);
4849 __ j(kNotEqual, slow_path->GetEntryLabel());
4850 __ Bind(&do_put);
4851 } else {
4852 __ j(kNotEqual, slow_path->GetEntryLabel());
4853 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004854 }
4855 }
4856
4857 if (kPoisonHeapReferences) {
4858 __ movl(temp, register_value);
4859 __ PoisonHeapReference(temp);
4860 __ movl(address, temp);
4861 } else {
4862 __ movl(address, register_value);
4863 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004864 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004865 codegen_->MaybeRecordImplicitNullCheck(instruction);
4866 }
4867
4868 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4869 codegen_->MarkGCCard(
4870 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4871 __ Bind(&done);
4872
4873 if (slow_path != nullptr) {
4874 __ Bind(slow_path->GetExitLabel());
4875 }
4876
4877 break;
4878 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004879
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004880 case Primitive::kPrimInt: {
4881 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4882 Address address = index.IsConstant()
4883 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4884 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4885 if (value.IsRegister()) {
4886 __ movl(address, value.AsRegister<CpuRegister>());
4887 } else {
4888 DCHECK(value.IsConstant()) << value;
4889 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4890 __ movl(address, Immediate(v));
4891 }
4892 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004893 break;
4894 }
4895
4896 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004897 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4898 Address address = index.IsConstant()
4899 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4900 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4901 if (value.IsRegister()) {
4902 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004903 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004904 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004905 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004906 Address address_high = index.IsConstant()
4907 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4908 offset + sizeof(int32_t))
4909 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4910 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004911 }
4912 break;
4913 }
4914
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004915 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004916 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4917 Address address = index.IsConstant()
4918 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4919 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004920 if (value.IsFpuRegister()) {
4921 __ movss(address, value.AsFpuRegister<XmmRegister>());
4922 } else {
4923 DCHECK(value.IsConstant());
4924 int32_t v =
4925 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4926 __ movl(address, Immediate(v));
4927 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004928 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004929 break;
4930 }
4931
4932 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004933 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4934 Address address = index.IsConstant()
4935 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4936 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004937 if (value.IsFpuRegister()) {
4938 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4939 codegen_->MaybeRecordImplicitNullCheck(instruction);
4940 } else {
4941 int64_t v =
4942 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4943 Address address_high = index.IsConstant()
4944 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4945 offset + sizeof(int32_t))
4946 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4947 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4948 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004949 break;
4950 }
4951
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004952 case Primitive::kPrimVoid:
4953 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004954 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004955 }
4956}
4957
4958void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004959 LocationSummary* locations =
4960 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004961 locations->SetInAt(0, Location::RequiresRegister());
4962 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004963}
4964
4965void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
4966 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01004967 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00004968 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
4969 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004970 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004971 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004972}
4973
4974void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004975 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4976 ? LocationSummary::kCallOnSlowPath
4977 : LocationSummary::kNoCall;
4978 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05004979 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04004980 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004981 if (instruction->HasUses()) {
4982 locations->SetOut(Location::SameAsFirstInput());
4983 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004984}
4985
4986void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
4987 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05004988 Location index_loc = locations->InAt(0);
4989 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07004990 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004991 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004992
Mark Mendell99dbd682015-04-22 16:18:52 -04004993 if (length_loc.IsConstant()) {
4994 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
4995 if (index_loc.IsConstant()) {
4996 // BCE will remove the bounds check if we are guaranteed to pass.
4997 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
4998 if (index < 0 || index >= length) {
4999 codegen_->AddSlowPath(slow_path);
5000 __ jmp(slow_path->GetEntryLabel());
5001 } else {
5002 // Some optimization after BCE may have generated this, and we should not
5003 // generate a bounds check if it is a valid range.
5004 }
5005 return;
5006 }
5007
5008 // We have to reverse the jump condition because the length is the constant.
5009 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5010 __ cmpl(index_reg, Immediate(length));
5011 codegen_->AddSlowPath(slow_path);
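    // kAboveEqual is an unsigned comparison, so a negative index (seen as a large
    // unsigned value) also takes the slow path.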
5012 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005013 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04005014 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5015 if (index_loc.IsConstant()) {
5016 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5017 __ cmpl(length, Immediate(value));
5018 } else {
5019 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5020 }
5021 codegen_->AddSlowPath(slow_path);
5022 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005023 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005024}
5025
5026void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5027 CpuRegister card,
5028 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005029 CpuRegister value,
5030 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005031 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005032 if (value_can_be_null) {
5033 __ testl(value, value);
5034 __ j(kEqual, &is_null);
5035 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005036 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5037 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005038 __ movq(temp, object);
5039 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
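  // `card` holds the card table base, which is set up so that its least
  // significant byte equals the dirty-card value; storing that byte at
  // base + (object >> kCardShift) marks the object's card dirty.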
Roland Levillain4d027112015-07-01 15:41:14 +01005040 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005041 if (value_can_be_null) {
5042 __ Bind(&is_null);
5043 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005044}
5045
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005046void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005047 LOG(FATAL) << "Unimplemented";
5048}
5049
5050void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005051 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5052}
5053
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005054void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5055 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5056}
5057
5058void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005059 HBasicBlock* block = instruction->GetBlock();
5060 if (block->GetLoopInformation() != nullptr) {
5061 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5062 // The back edge will generate the suspend check.
5063 return;
5064 }
5065 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5066 // The goto will generate the suspend check.
5067 return;
5068 }
5069 GenerateSuspendCheck(instruction, nullptr);
5070}
5071
5072void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5073 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005074 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005075 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5076 if (slow_path == nullptr) {
5077 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5078 instruction->SetSlowPath(slow_path);
5079 codegen_->AddSlowPath(slow_path);
5080 if (successor != nullptr) {
5081 DCHECK(successor->IsLoopHeader());
5082 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5083 }
5084 } else {
5085 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5086 }
5087
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005088 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5089 /* no_rip */ true),
5090 Immediate(0));
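  // The thread flags are non-zero when a suspend or checkpoint request is
  // pending; branch to the slow path (or skip to the successor) accordingly.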
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005091 if (successor == nullptr) {
5092 __ j(kNotEqual, slow_path->GetEntryLabel());
5093 __ Bind(slow_path->GetReturnLabel());
5094 } else {
5095 __ j(kEqual, codegen_->GetLabelOf(successor));
5096 __ jmp(slow_path->GetEntryLabel());
5097 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005098}
5099
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005100X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5101 return codegen_->GetAssembler();
5102}
5103
5104void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005105 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005106 Location source = move->GetSource();
5107 Location destination = move->GetDestination();
5108
5109 if (source.IsRegister()) {
5110 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005111 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005112 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005113 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005114 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005115 } else {
5116 DCHECK(destination.IsDoubleStackSlot());
5117 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005118 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005119 }
5120 } else if (source.IsStackSlot()) {
5121 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005122 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005123 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005124 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005125 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005126 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005127 } else {
5128 DCHECK(destination.IsStackSlot());
5129 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5130 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5131 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005132 } else if (source.IsDoubleStackSlot()) {
5133 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005134 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005135 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005136 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005137 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5138 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005139 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005140 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005141 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5142 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5143 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005144 } else if (source.IsConstant()) {
5145 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005146 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5147 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005148 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005149 if (value == 0) {
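          // xorl has a shorter encoding than movl with a zero immediate.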
5150 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5151 } else {
5152 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5153 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005154 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005155 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005156 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005157 }
5158 } else if (constant->IsLongConstant()) {
5159 int64_t value = constant->AsLongConstant()->GetValue();
5160 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005161 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005162 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005163 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005164 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005165 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005166 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005167 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005168 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005169 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005170 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005171 } else {
5172 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005173 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005174 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5175 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005176 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005177 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005178 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005179 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005180 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005181 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005182 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005183 } else {
5184 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005185 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005186 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005187 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005188 } else if (source.IsFpuRegister()) {
5189 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005190 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005191 } else if (destination.IsStackSlot()) {
5192 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005193 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005194 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005195 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005196 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005197 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005198 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005199 }
5200}
5201
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005202void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005203 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005204 __ movl(Address(CpuRegister(RSP), mem), reg);
5205 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005206}
5207
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005208void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005209 ScratchRegisterScope ensure_scratch(
5210 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5211
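  // If the scratch register was spilled, the push moved RSP down by one word, so
  // the incoming RSP-relative offsets must be rebased.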
5212 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5213 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5214 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5215 Address(CpuRegister(RSP), mem2 + stack_offset));
5216 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5217 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5218 CpuRegister(ensure_scratch.GetRegister()));
5219}
5220
Mark Mendell8a1c7282015-06-29 15:41:28 -04005221void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5222 __ movq(CpuRegister(TMP), reg1);
5223 __ movq(reg1, reg2);
5224 __ movq(reg2, CpuRegister(TMP));
5225}
5226
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005227void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5228 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5229 __ movq(Address(CpuRegister(RSP), mem), reg);
5230 __ movq(reg, CpuRegister(TMP));
5231}
5232
5233void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5234 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005235 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005236
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005237 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5238 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5239 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5240 Address(CpuRegister(RSP), mem2 + stack_offset));
5241 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5242 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5243 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005244}
5245
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005246void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5247 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5248 __ movss(Address(CpuRegister(RSP), mem), reg);
5249 __ movd(reg, CpuRegister(TMP));
5250}
5251
5252void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5253 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5254 __ movsd(Address(CpuRegister(RSP), mem), reg);
5255 __ movd(reg, CpuRegister(TMP));
5256}
5257
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005258void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005259 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005260 Location source = move->GetSource();
5261 Location destination = move->GetDestination();
5262
5263 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005264 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005265 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005266 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005267 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005268 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005269 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005270 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5271 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005272 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005273 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005274 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005275 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5276 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005277 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
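    // Only the low 64 bits of each XMM register hold a live value here (a float
    // or a double), so swapping through the integer TMP register is sufficient.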
Roland Levillain271ab9c2014-11-27 15:23:57 +00005278 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5279 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5280 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005281 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005282 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005283 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005284 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005285 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005286 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005287 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005288 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005289 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005290 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005291 }
5292}
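
// EmitSwap resolves one cycle edge of the parallel move at a time, dispatching
// on the location kinds: register/memory and memory/memory pairs go through the
// Exchange32/Exchange64 helpers (the memory/memory forms also reserve an extra
// scratch core register via an `ensure_scratch` guard, whose tail is visible
// further up), FP/FP pairs are rotated through TMP with movd/movaps, and the
// 32-bit vs. 64-bit helpers are chosen by whether the stack locations are
// single or double slots.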
5293
5294
5295void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5296 __ pushq(CpuRegister(reg));
5297}
5298
5299
5300void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5301 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005302}
5303
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005304void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005305 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005306 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5307 Immediate(mirror::Class::kStatusInitialized));
5308 __ j(kLess, slow_path->GetEntryLabel());
5309 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005310 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005311}
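
// A sketch of what GenerateClassInitializationCheck emits, assuming the status
// values are ordered so that every state below kStatusInitialized (including
// the error states) still requires a trip into the runtime:
//
//   if (class->status_ < kStatusInitialized) {
//     slow_path();  // initialize (or throw), then fall through
//   }
//   // No trailing fence: x86-64 does not reorder a load with earlier loads.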
5312
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005313void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01005314 InvokeRuntimeCallingConvention calling_convention;
5315 CodeGenerator::CreateLoadClassLocationSummary(
5316 cls,
5317 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Roland Levillain0d5a2812015-11-13 10:07:31 +00005318 Location::RegisterLocation(RAX),
5319 /* code_generator_supports_read_barrier */ true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005320}
5321
5322void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005323 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005324 if (cls->NeedsAccessCheck()) {
5325 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5326 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5327 cls,
5328 cls->GetDexPc(),
5329 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005330 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005331 return;
5332 }
5333
Roland Levillain0d5a2812015-11-13 10:07:31 +00005334 Location out_loc = locations->Out();
5335 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Calin Juravle580b6092015-10-06 17:35:58 +01005336 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005337
Calin Juravle580b6092015-10-06 17:35:58 +01005338 if (cls->IsReferrersClass()) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005339 DCHECK(!cls->CanCallRuntime());
5340 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005341 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5342 GenerateGcRootFieldLoad(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005343 cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005344 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005345 // /* GcRoot<mirror::Class>[] */ out =
5346 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5347 __ movq(out, Address(current_method,
5348 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005349 // /* GcRoot<mirror::Class> */ out = out[type_index]
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005350 GenerateGcRootFieldLoad(
5351 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Roland Levillain4d027112015-07-01 15:41:14 +01005352
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005353 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
5354 DCHECK(cls->CanCallRuntime());
5355 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5356 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5357 codegen_->AddSlowPath(slow_path);
5358 if (!cls->IsInDexCache()) {
5359 __ testl(out, out);
5360 __ j(kEqual, slow_path->GetEntryLabel());
5361 }
5362 if (cls->MustGenerateClinitCheck()) {
5363 GenerateClassInitializationCheck(slow_path, out);
5364 } else {
5365 __ Bind(slow_path->GetExitLabel());
5366 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005367 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005368 }
5369}
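
// Rough shape of the class load emitted above (sketch, read barriers elided;
// the access-check case is handled earlier through the runtime call):
//
//   if (cls->IsReferrersClass()) {
//     out = current_method->declaring_class_;
//   } else {
//     out = current_method->dex_cache_resolved_types_[type_index];
//     if (!cls->IsInDexCache() && out == null) slow_path();   // resolve the type
//     if (cls->MustGenerateClinitCheck()) check_clinit(out);  // may reuse the same slow path
//   }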
5370
5371void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5372 LocationSummary* locations =
5373 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5374 locations->SetInAt(0, Location::RequiresRegister());
5375 if (check->HasUses()) {
5376 locations->SetOut(Location::SameAsFirstInput());
5377 }
5378}
5379
5380void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005381  // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005382 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005383 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005384 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005385 GenerateClassInitializationCheck(slow_path,
5386 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005387}
5388
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005389HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5390 HLoadString::LoadKind desired_string_load_kind) {
5391 if (kEmitCompilerReadBarrier) {
5392 switch (desired_string_load_kind) {
5393 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5394 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5395 case HLoadString::LoadKind::kBootImageAddress:
5396 // TODO: Implement for read barrier.
5397 return HLoadString::LoadKind::kDexCacheViaMethod;
5398 default:
5399 break;
5400 }
5401 }
5402 switch (desired_string_load_kind) {
5403 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5404 DCHECK(!GetCompilerOptions().GetCompilePic());
5405 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5406 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5407 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5408 DCHECK(GetCompilerOptions().GetCompilePic());
5409 break;
5410 case HLoadString::LoadKind::kBootImageAddress:
5411 break;
5412 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005413 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005414 break;
5415 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005416 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005417 break;
5418 case HLoadString::LoadKind::kDexCacheViaMethod:
5419 break;
5420 }
5421 return desired_string_load_kind;
5422}
5423
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005424void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005425 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005426 ? LocationSummary::kCallOnSlowPath
5427 : LocationSummary::kNoCall;
5428 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005429 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5430 locations->SetInAt(0, Location::RequiresRegister());
5431 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005432 locations->SetOut(Location::RequiresRegister());
5433}
5434
5435void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005436 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005437 Location out_loc = locations->Out();
5438 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005439
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005440 switch (load->GetLoadKind()) {
5441 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
5442 DCHECK(!kEmitCompilerReadBarrier);
5443 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5444 codegen_->RecordStringPatch(load);
5445 return; // No dex cache slow path.
5446 }
5447 case HLoadString::LoadKind::kBootImageAddress: {
5448 DCHECK(!kEmitCompilerReadBarrier);
5449 DCHECK_NE(load->GetAddress(), 0u);
5450 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5451 __ movl(out, Immediate(address)); // Zero-extended.
5452 codegen_->RecordSimplePatch();
5453 return; // No dex cache slow path.
5454 }
5455 case HLoadString::LoadKind::kDexCacheAddress: {
5456 DCHECK_NE(load->GetAddress(), 0u);
5457 if (IsUint<32>(load->GetAddress())) {
5458 Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
5459 GenerateGcRootFieldLoad(load, out_loc, address);
5460 } else {
5461 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5462 __ movq(out, Immediate(load->GetAddress()));
5463 GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
5464 }
5465 break;
5466 }
5467 case HLoadString::LoadKind::kDexCachePcRelative: {
5468 uint32_t offset = load->GetDexCacheElementOffset();
5469 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
5470 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5471 /* no_rip */ false);
5472 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
5473 break;
5474 }
5475 case HLoadString::LoadKind::kDexCacheViaMethod: {
5476 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5477
5478 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5479 GenerateGcRootFieldLoad(
5480 load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5481 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5482 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
5483 // /* GcRoot<mirror::String> */ out = out[string_index]
5484 GenerateGcRootFieldLoad(
5485 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
5486 break;
5487 }
5488 default:
5489 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
5490 UNREACHABLE();
5491 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005492
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005493 if (!load->IsInDexCache()) {
5494 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5495 codegen_->AddSlowPath(slow_path);
5496 __ testl(out, out);
5497 __ j(kEqual, slow_path->GetEntryLabel());
5498 __ Bind(slow_path->GetExitLabel());
5499 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005500}
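
// Summary of the string load strategies above (sketch):
//
//   kBootImageLinkTimePcRelative: leal out, [rip + offset]  // offset patched at link time
//   kBootImageAddress:            movl out, imm32           // known boot image address
//   kDexCacheAddress:             GC-root load from a known dex cache address
//   kDexCachePcRelative:          GC-root load through a patched RIP-relative address
//   kDexCacheViaMethod:           current_method->declaring_class_->dex_cache_strings_[index]
//
// The boot image kinds return early; the dex cache kinds fall through to the
// null check and LoadStringSlowPathX86_64 when the string is not guaranteed to
// be in the dex cache.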
5501
David Brazdilcb1c0552015-08-04 16:22:25 +01005502static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005503 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5504 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005505}
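
// The pending exception is stored in the Thread object; on x86-64 the current
// Thread is reachable through the GS segment, so the load and clear below use
// a gs:-prefixed absolute address instead of materializing Thread* in a
// register.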
5506
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005507void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5508 LocationSummary* locations =
5509 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5510 locations->SetOut(Location::RequiresRegister());
5511}
5512
5513void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005514 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5515}
5516
5517void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5518 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5519}
5520
5521void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5522 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005523}
5524
5525void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5526 LocationSummary* locations =
5527 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5528 InvokeRuntimeCallingConvention calling_convention;
5529 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5530}
5531
5532void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005533 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5534 instruction,
5535 instruction->GetDexPc(),
5536 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005537 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005538}
5539
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005540static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5541 return kEmitCompilerReadBarrier &&
5542 (kUseBakerReadBarrier ||
5543 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5544 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5545 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5546}
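
// Rationale (as used by the type check code below): with Baker read barriers
// every covered check kind needs an extra register for the lock word load in
// the fast path; with slow-path read barriers only the kinds that chain
// reference loads through `out`/`temp` need one; without read barriers no
// extra register is ever requested.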
5547
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005548void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005549 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005550 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5551 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005552 case TypeCheckKind::kExactCheck:
5553 case TypeCheckKind::kAbstractClassCheck:
5554 case TypeCheckKind::kClassHierarchyCheck:
5555 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005556 call_kind =
5557 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005558 break;
5559 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005560 case TypeCheckKind::kUnresolvedCheck:
5561 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005562 call_kind = LocationSummary::kCallOnSlowPath;
5563 break;
5564 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005565
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005566 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005567 locations->SetInAt(0, Location::RequiresRegister());
5568 locations->SetInAt(1, Location::Any());
5569 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5570 locations->SetOut(Location::RequiresRegister());
5571 // When read barriers are enabled, we need a temporary register for
5572 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005573 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005574 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005575 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005576}
5577
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005578void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005579 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005580 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005581 Location obj_loc = locations->InAt(0);
5582 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005583 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005584 Location out_loc = locations->Out();
5585 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005586 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005587 locations->GetTemp(0) :
5588 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005589 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005590 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5591 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5592 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005593 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005594 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005595
5596 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005597 // Avoid null check if we know obj is not null.
5598 if (instruction->MustDoNullCheck()) {
5599 __ testl(obj, obj);
5600 __ j(kEqual, &zero);
5601 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005602
Roland Levillain0d5a2812015-11-13 10:07:31 +00005603 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005604 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005605
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005606 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005607 case TypeCheckKind::kExactCheck: {
5608 if (cls.IsRegister()) {
5609 __ cmpl(out, cls.AsRegister<CpuRegister>());
5610 } else {
5611 DCHECK(cls.IsStackSlot()) << cls;
5612 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5613 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005614 if (zero.IsLinked()) {
5615 // Classes must be equal for the instanceof to succeed.
5616 __ j(kNotEqual, &zero);
5617 __ movl(out, Immediate(1));
5618 __ jmp(&done);
5619 } else {
5620 __ setcc(kEqual, out);
5621 // setcc only sets the low byte.
5622 __ andl(out, Immediate(1));
5623 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005624 break;
5625 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005626
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005627 case TypeCheckKind::kAbstractClassCheck: {
5628 // If the class is abstract, we eagerly fetch the super class of the
5629 // object to avoid doing a comparison we know will fail.
5630 NearLabel loop, success;
5631 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005632 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005633 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005634 __ testl(out, out);
5635 // If `out` is null, we use it for the result, and jump to `done`.
5636 __ j(kEqual, &done);
5637 if (cls.IsRegister()) {
5638 __ cmpl(out, cls.AsRegister<CpuRegister>());
5639 } else {
5640 DCHECK(cls.IsStackSlot()) << cls;
5641 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5642 }
5643 __ j(kNotEqual, &loop);
5644 __ movl(out, Immediate(1));
5645 if (zero.IsLinked()) {
5646 __ jmp(&done);
5647 }
5648 break;
5649 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005650
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005651 case TypeCheckKind::kClassHierarchyCheck: {
5652 // Walk over the class hierarchy to find a match.
5653 NearLabel loop, success;
5654 __ Bind(&loop);
5655 if (cls.IsRegister()) {
5656 __ cmpl(out, cls.AsRegister<CpuRegister>());
5657 } else {
5658 DCHECK(cls.IsStackSlot()) << cls;
5659 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5660 }
5661 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005662 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005663 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005664 __ testl(out, out);
5665 __ j(kNotEqual, &loop);
5666 // If `out` is null, we use it for the result, and jump to `done`.
5667 __ jmp(&done);
5668 __ Bind(&success);
5669 __ movl(out, Immediate(1));
5670 if (zero.IsLinked()) {
5671 __ jmp(&done);
5672 }
5673 break;
5674 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005675
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005676 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005677 // Do an exact check.
5678 NearLabel exact_check;
5679 if (cls.IsRegister()) {
5680 __ cmpl(out, cls.AsRegister<CpuRegister>());
5681 } else {
5682 DCHECK(cls.IsStackSlot()) << cls;
5683 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5684 }
5685 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005686 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005687 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005688 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005689 __ testl(out, out);
5690 // If `out` is null, we use it for the result, and jump to `done`.
5691 __ j(kEqual, &done);
5692 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5693 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005694 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005695 __ movl(out, Immediate(1));
5696 __ jmp(&done);
5697 break;
5698 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005699
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005700 case TypeCheckKind::kArrayCheck: {
5701 if (cls.IsRegister()) {
5702 __ cmpl(out, cls.AsRegister<CpuRegister>());
5703 } else {
5704 DCHECK(cls.IsStackSlot()) << cls;
5705 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5706 }
5707 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005708 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5709 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005710 codegen_->AddSlowPath(slow_path);
5711 __ j(kNotEqual, slow_path->GetEntryLabel());
5712 __ movl(out, Immediate(1));
5713 if (zero.IsLinked()) {
5714 __ jmp(&done);
5715 }
5716 break;
5717 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005718
Calin Juravle98893e12015-10-02 21:05:03 +01005719 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005720 case TypeCheckKind::kInterfaceCheck: {
5721 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005722 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005723 // cases.
5724 //
5725 // We cannot directly call the InstanceofNonTrivial runtime
5726 // entry point without resorting to a type checking slow path
5727 // here (i.e. by calling InvokeRuntime directly), as it would
5728 // require to assign fixed registers for the inputs of this
5729 // HInstanceOf instruction (following the runtime calling
5730 // convention), which might be cluttered by the potential first
5731 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005732 //
5733 // TODO: Introduce a new runtime entry point taking the object
5734 // to test (instead of its class) as argument, and let it deal
5735 // with the read barrier issues. This will let us refactor this
5736 // case of the `switch` code as it was previously (with a direct
5737 // call to the runtime not using a type checking slow path).
5738 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005739 DCHECK(locations->OnlyCallsOnSlowPath());
5740 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5741 /* is_fatal */ false);
5742 codegen_->AddSlowPath(slow_path);
5743 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005744 if (zero.IsLinked()) {
5745 __ jmp(&done);
5746 }
5747 break;
5748 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005749 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005750
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005751 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005752 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005753 __ xorl(out, out);
5754 }
5755
5756 if (done.IsLinked()) {
5757 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005758 }
5759
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005760 if (slow_path != nullptr) {
5761 __ Bind(slow_path->GetExitLabel());
5762 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005763}
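
// Overall shape of the instanceof sequence emitted above (sketch, read
// barriers elided):
//
//   if (obj == null) { out = 0; goto done; }   // only if a null check is needed
//   out = obj->klass_;
//   switch (type_check_kind) {
//     kExactCheck:          out = (out == cls);
//     kAbstractClassCheck:  walk out = out->super_class_ until cls or null;
//     kClassHierarchyCheck: same walk, starting with a compare against cls;
//     kArrayObjectCheck:    exact match, or component type is a non-primitive class;
//     kArrayCheck:          one compare, slow path on mismatch;
//     unresolved/interface: always TypeCheckSlowPathX86_64;
//   }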
5764
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005765void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005766 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5767 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005768 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5769 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005770 case TypeCheckKind::kExactCheck:
5771 case TypeCheckKind::kAbstractClassCheck:
5772 case TypeCheckKind::kClassHierarchyCheck:
5773 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005774 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5775 LocationSummary::kCallOnSlowPath :
5776 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005777 break;
5778 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005779 case TypeCheckKind::kUnresolvedCheck:
5780 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005781 call_kind = LocationSummary::kCallOnSlowPath;
5782 break;
5783 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005784 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5785 locations->SetInAt(0, Location::RequiresRegister());
5786 locations->SetInAt(1, Location::Any());
5787 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5788 locations->AddTemp(Location::RequiresRegister());
5789 // When read barriers are enabled, we need an additional temporary
5790 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005791 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005792 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005793 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005794}
5795
5796void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005797 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005798 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005799 Location obj_loc = locations->InAt(0);
5800 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005801 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005802 Location temp_loc = locations->GetTemp(0);
5803 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005804 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005805 locations->GetTemp(1) :
5806 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005807 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5808 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5809 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5810 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005811
Roland Levillain0d5a2812015-11-13 10:07:31 +00005812 bool is_type_check_slow_path_fatal =
5813 (type_check_kind == TypeCheckKind::kExactCheck ||
5814 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5815 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5816 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5817 !instruction->CanThrowIntoCatchBlock();
5818 SlowPathCode* type_check_slow_path =
5819 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5820 is_type_check_slow_path_fatal);
5821 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005822
Roland Levillain0d5a2812015-11-13 10:07:31 +00005823 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005824 case TypeCheckKind::kExactCheck:
5825 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005826 NearLabel done;
5827 // Avoid null check if we know obj is not null.
5828 if (instruction->MustDoNullCheck()) {
5829 __ testl(obj, obj);
5830 __ j(kEqual, &done);
5831 }
5832
5833 // /* HeapReference<Class> */ temp = obj->klass_
5834 GenerateReferenceLoadTwoRegisters(
5835 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5836
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005837 if (cls.IsRegister()) {
5838 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5839 } else {
5840 DCHECK(cls.IsStackSlot()) << cls;
5841 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5842 }
5843 // Jump to slow path for throwing the exception or doing a
5844 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005845 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005846 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005847 break;
5848 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005849
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005850 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005851 NearLabel done;
5852 // Avoid null check if we know obj is not null.
5853 if (instruction->MustDoNullCheck()) {
5854 __ testl(obj, obj);
5855 __ j(kEqual, &done);
5856 }
5857
5858 // /* HeapReference<Class> */ temp = obj->klass_
5859 GenerateReferenceLoadTwoRegisters(
5860 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5861
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005862 // If the class is abstract, we eagerly fetch the super class of the
5863 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005864 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005865 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005866 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005867 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005868
5869 // If the class reference currently in `temp` is not null, jump
5870 // to the `compare_classes` label to compare it with the checked
5871 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005872 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005873 __ j(kNotEqual, &compare_classes);
5874 // Otherwise, jump to the slow path to throw the exception.
5875 //
5876 // But before, move back the object's class into `temp` before
5877 // going into the slow path, as it has been overwritten in the
5878 // meantime.
5879 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005880 GenerateReferenceLoadTwoRegisters(
5881 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005882 __ jmp(type_check_slow_path->GetEntryLabel());
5883
5884 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005885 if (cls.IsRegister()) {
5886 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5887 } else {
5888 DCHECK(cls.IsStackSlot()) << cls;
5889 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5890 }
5891 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00005892 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005893 break;
5894 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005895
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005896 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005897 NearLabel done;
5898 // Avoid null check if we know obj is not null.
5899 if (instruction->MustDoNullCheck()) {
5900 __ testl(obj, obj);
5901 __ j(kEqual, &done);
5902 }
5903
5904 // /* HeapReference<Class> */ temp = obj->klass_
5905 GenerateReferenceLoadTwoRegisters(
5906 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5907
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005908 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005909 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005910 __ Bind(&loop);
5911 if (cls.IsRegister()) {
5912 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5913 } else {
5914 DCHECK(cls.IsStackSlot()) << cls;
5915 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5916 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005917 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005918
Roland Levillain0d5a2812015-11-13 10:07:31 +00005919 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005920 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005921
5922 // If the class reference currently in `temp` is not null, jump
5923 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005924 __ testl(temp, temp);
5925 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005926 // Otherwise, jump to the slow path to throw the exception.
5927 //
5928 // But before, move back the object's class into `temp` before
5929 // going into the slow path, as it has been overwritten in the
5930 // meantime.
5931 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005932 GenerateReferenceLoadTwoRegisters(
5933 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005934 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005935 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005936 break;
5937 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005938
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005939 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005940 // We cannot use a NearLabel here, as its range might be too
5941 // short in some cases when read barriers are enabled. This has
5942 // been observed for instance when the code emitted for this
5943 // case uses high x86-64 registers (R8-R15).
5944 Label done;
5945 // Avoid null check if we know obj is not null.
5946 if (instruction->MustDoNullCheck()) {
5947 __ testl(obj, obj);
5948 __ j(kEqual, &done);
5949 }
5950
5951 // /* HeapReference<Class> */ temp = obj->klass_
5952 GenerateReferenceLoadTwoRegisters(
5953 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5954
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005955 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005956 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005957 if (cls.IsRegister()) {
5958 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5959 } else {
5960 DCHECK(cls.IsStackSlot()) << cls;
5961 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5962 }
5963 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005964
5965 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005966 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005967 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005968
5969 // If the component type is not null (i.e. the object is indeed
5970 // an array), jump to label `check_non_primitive_component_type`
5971 // to further check that this component type is not a primitive
5972 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005973 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005974 __ j(kNotEqual, &check_non_primitive_component_type);
5975 // Otherwise, jump to the slow path to throw the exception.
5976 //
5977 // But before, move back the object's class into `temp` before
5978 // going into the slow path, as it has been overwritten in the
5979 // meantime.
5980 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005981 GenerateReferenceLoadTwoRegisters(
5982 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005983 __ jmp(type_check_slow_path->GetEntryLabel());
5984
5985 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005986 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005987 __ j(kEqual, &done);
5988 // Same comment as above regarding `temp` and the slow path.
5989 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005990 GenerateReferenceLoadTwoRegisters(
5991 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005992 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005993 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005994 break;
5995 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005996
Calin Juravle98893e12015-10-02 21:05:03 +01005997 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005998 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00005999 NearLabel done;
6000 // Avoid null check if we know obj is not null.
6001 if (instruction->MustDoNullCheck()) {
6002 __ testl(obj, obj);
6003 __ j(kEqual, &done);
6004 }
6005
6006 // /* HeapReference<Class> */ temp = obj->klass_
6007 GenerateReferenceLoadTwoRegisters(
6008 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6009
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006010 // We always go into the type check slow path for the unresolved
6011 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006012 //
6013 // We cannot directly call the CheckCast runtime entry point
6014 // without resorting to a type checking slow path here (i.e. by
6015 // calling InvokeRuntime directly), as it would require to
6016 // assign fixed registers for the inputs of this HInstanceOf
6017 // instruction (following the runtime calling convention), which
6018 // might be cluttered by the potential first read barrier
6019 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006020 //
6021 // TODO: Introduce a new runtime entry point taking the object
6022 // to test (instead of its class) as argument, and let it deal
6023 // with the read barrier issues. This will let us refactor this
6024 // case of the `switch` code as it was previously (with a direct
6025 // call to the runtime not using a type checking slow path).
6026 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006027 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006028 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006029 break;
6030 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006031
Roland Levillain0d5a2812015-11-13 10:07:31 +00006032 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006033}
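
// VisitCheckCast mirrors VisitInstanceOf but produces no value: on success the
// code simply falls through to `done`, and on failure it jumps to
// TypeCheckSlowPathX86_64, which calls into the runtime to throw. The slow
// path is only marked fatal for the simple check kinds when the instruction
// cannot throw into a catch block.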
6034
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006035void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6036 LocationSummary* locations =
6037 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
6038 InvokeRuntimeCallingConvention calling_convention;
6039 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6040}
6041
6042void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01006043 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
6044 : QUICK_ENTRY_POINT(pUnlockObject),
6045 instruction,
6046 instruction->GetDexPc(),
6047 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006048 if (instruction->IsEnter()) {
6049 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6050 } else {
6051 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6052 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006053}
6054
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006055void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6056void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6057void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6058
6059void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6060 LocationSummary* locations =
6061 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6062 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6063 || instruction->GetResultType() == Primitive::kPrimLong);
6064 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006065 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006066 locations->SetOut(Location::SameAsFirstInput());
6067}
6068
6069void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6070 HandleBitwiseOperation(instruction);
6071}
6072
6073void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6074 HandleBitwiseOperation(instruction);
6075}
6076
6077void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6078 HandleBitwiseOperation(instruction);
6079}
6080
6081void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6082 LocationSummary* locations = instruction->GetLocations();
6083 Location first = locations->InAt(0);
6084 Location second = locations->InAt(1);
6085 DCHECK(first.Equals(locations->Out()));
6086
6087 if (instruction->GetResultType() == Primitive::kPrimInt) {
6088 if (second.IsRegister()) {
6089 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006090 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006091 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006092 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006093 } else {
6094 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006095 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006096 }
6097 } else if (second.IsConstant()) {
6098 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6099 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006100 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006101 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006102 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006103 } else {
6104 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006105 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006106 }
6107 } else {
6108 Address address(CpuRegister(RSP), second.GetStackIndex());
6109 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006110 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006111 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006112 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006113 } else {
6114 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006115 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006116 }
6117 }
6118 } else {
6119 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006120 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6121 bool second_is_constant = false;
6122 int64_t value = 0;
6123 if (second.IsConstant()) {
6124 second_is_constant = true;
6125 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006126 }
Mark Mendell40741f32015-04-20 22:10:34 -04006127 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006128
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006129 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006130 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006131 if (is_int32_value) {
6132 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6133 } else {
6134 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6135 }
6136 } else if (second.IsDoubleStackSlot()) {
6137 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006138 } else {
6139 __ andq(first_reg, second.AsRegister<CpuRegister>());
6140 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006141 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006142 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006143 if (is_int32_value) {
6144 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6145 } else {
6146 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6147 }
6148 } else if (second.IsDoubleStackSlot()) {
6149 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006150 } else {
6151 __ orq(first_reg, second.AsRegister<CpuRegister>());
6152 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006153 } else {
6154 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006155 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006156 if (is_int32_value) {
6157 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6158 } else {
6159 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6160 }
6161 } else if (second.IsDoubleStackSlot()) {
6162 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006163 } else {
6164 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6165 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006166 }
6167 }
6168}
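
// For the long and/or/xor above, only immediates that fit a sign-extended
// imm32 can be encoded directly; wider constants are loaded from the
// RIP-relative constant area instead. A sketch of the selection:
//
//   if (IsInt<32>(value))  opq reg, imm32                  // sign-extended by the CPU
//   else                   opq reg, [rip + int64_literal]  // LiteralInt64Address(value)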
6169
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006170void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6171 Location out,
6172 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006173 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006174 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6175 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006176 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006177 if (kUseBakerReadBarrier) {
6178 // Load with fast path based Baker's read barrier.
6179 // /* HeapReference<Object> */ out = *(out + offset)
6180 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006181 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006182 } else {
6183 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006184 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006185 // in the following move operation, as we will need it for the
6186 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006187 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006188 // /* HeapReference<Object> */ out = *(out + offset)
6189 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006190 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006191 }
6192 } else {
6193 // Plain load with no read barrier.
6194 // /* HeapReference<Object> */ out = *(out + offset)
6195 __ movl(out_reg, Address(out_reg, offset));
6196 __ MaybeUnpoisonHeapReference(out_reg);
6197 }
6198}
6199
6200void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6201 Location out,
6202 Location obj,
6203 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006204 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006205 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6206 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6207 if (kEmitCompilerReadBarrier) {
6208 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006209 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006210 // Load with fast path based Baker's read barrier.
6211 // /* HeapReference<Object> */ out = *(obj + offset)
6212 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006213 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006214 } else {
6215 // Load with slow path based read barrier.
6216 // /* HeapReference<Object> */ out = *(obj + offset)
6217 __ movl(out_reg, Address(obj_reg, offset));
6218 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6219 }
6220 } else {
6221 // Plain load with no read barrier.
6222 // /* HeapReference<Object> */ out = *(obj + offset)
6223 __ movl(out_reg, Address(obj_reg, offset));
6224 __ MaybeUnpoisonHeapReference(out_reg);
6225 }
6226}
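
// The two reference load helpers above pick between three strategies: a Baker
// read barrier fast path (load the lock word, then the reference, and branch
// to a mark slow path for gray objects), a slow-path read barrier (plain load
// followed by GenerateReadBarrierSlow), or, with read barriers disabled, a
// plain load plus MaybeUnpoisonHeapReference.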
6227
6228void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6229 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006230 const Address& address,
6231 Label* fixup_label) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006232 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6233 if (kEmitCompilerReadBarrier) {
6234 if (kUseBakerReadBarrier) {
6235 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6236      // Baker's read barriers are used:
6237 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006238 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006239 // if (Thread::Current()->GetIsGcMarking()) {
6240 // root = ReadBarrier::Mark(root)
6241 // }
6242
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006243 // /* GcRoot<mirror::Object> */ root = *address
6244 __ movl(root_reg, address);
6245 if (fixup_label != nullptr) {
6246 __ Bind(fixup_label);
6247 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006248 static_assert(
6249 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6250 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6251 "have different sizes.");
6252 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6253 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6254 "have different sizes.");
6255
6256 // Slow path used to mark the GC root `root`.
6257 SlowPathCode* slow_path =
6258 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
6259 codegen_->AddSlowPath(slow_path);
6260
6261 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6262 /* no_rip */ true),
6263 Immediate(0));
6264 __ j(kNotEqual, slow_path->GetEntryLabel());
6265 __ Bind(slow_path->GetExitLabel());
6266 } else {
6267 // GC root loaded through a slow path for read barriers other
6268 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006269 // /* GcRoot<mirror::Object>* */ root = address
6270 __ leaq(root_reg, address);
6271 if (fixup_label != nullptr) {
6272 __ Bind(fixup_label);
6273 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006274 // /* mirror::Object* */ root = root->Read()
6275 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6276 }
6277 } else {
6278 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006279 // /* GcRoot<mirror::Object> */ root = *address
6280 __ movl(root_reg, address);
6281 if (fixup_label != nullptr) {
6282 __ Bind(fixup_label);
6283 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006284 // Note that GC roots are not affected by heap poisoning, thus we
6285 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006286 }
6287}
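
// GC root loads differ from the heap reference loads above in two ways that
// the code relies on: roots are never poisoned (so no unpoisoning step), and
// the Baker fast path keys off Thread::Current()->is_gc_marking rather than an
// object's lock word, since a root has no holder whose monitor could be
// inspected.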
6288
6289void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6290 Location ref,
6291 CpuRegister obj,
6292 uint32_t offset,
6293 Location temp,
6294 bool needs_null_check) {
6295 DCHECK(kEmitCompilerReadBarrier);
6296 DCHECK(kUseBakerReadBarrier);
6297
6298 // /* HeapReference<Object> */ ref = *(obj + offset)
6299 Address src(obj, offset);
6300 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6301}
6302
6303void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6304 Location ref,
6305 CpuRegister obj,
6306 uint32_t data_offset,
6307 Location index,
6308 Location temp,
6309 bool needs_null_check) {
6310 DCHECK(kEmitCompilerReadBarrier);
6311 DCHECK(kUseBakerReadBarrier);
6312
6313 // /* HeapReference<Object> */ ref =
6314 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6315 Address src = index.IsConstant() ?
6316 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6317 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
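  // Both forms scale the index by TIMES_4 because sizeof(HeapReference<Object>)
  // is 4: heap references stay 32-bit even on 64-bit targets.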
6318 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6319}
6320
6321void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6322 Location ref,
6323 CpuRegister obj,
6324 const Address& src,
6325 Location temp,
6326 bool needs_null_check) {
6327 DCHECK(kEmitCompilerReadBarrier);
6328 DCHECK(kUseBakerReadBarrier);
6329
6330 // In slow path based read barriers, the read barrier call is
6331 // inserted after the original load. However, in fast path based
6332 // Baker's read barriers, we need to perform the load of
6333 // mirror::Object::monitor_ *before* the original reference load.
6334 // This load-load ordering is required by the read barrier.
6335 // The fast path/slow path (for Baker's algorithm) should look like:
6336 //
6337   //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();

6338 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6339 // HeapReference<Object> ref = *src; // Original reference load.
6340 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6341 // if (is_gray) {
6342 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6343 // }
6344 //
6345 // Note: the original implementation in ReadBarrier::Barrier is
6346 // slightly more complex as:
6347 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006348 // the high-bits of rb_state, which are expected to be all zeroes
6349 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6350 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006351 // - it performs additional checks that we do not do here for
6352 // performance reasons.
6353
6354 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6355 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6356 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6357
6358 // /* int32_t */ monitor = obj->monitor_
6359 __ movl(temp_reg, Address(obj, monitor_offset));
6360 if (needs_null_check) {
6361 MaybeRecordImplicitNullCheck(instruction);
6362 }
6363 // /* LockWord */ lock_word = LockWord(monitor)
6364 static_assert(sizeof(LockWord) == sizeof(int32_t),
6365 "art::LockWord and int32_t have different sizes.");
6366 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6367 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6368 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6369 static_assert(
6370 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6371 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6372
6373 // Load fence to prevent load-load reordering.
6374 // Note that this is a no-op, thanks to the x86-64 memory model.
6375 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6376
6377 // The actual reference load.
6378 // /* HeapReference<Object> */ ref = *src
6379 __ movl(ref_reg, src);
6380
6381 // Object* ref = ref_addr->AsMirrorPtr()
6382 __ MaybeUnpoisonHeapReference(ref_reg);
6383
6384 // Slow path used to mark the object `ref` when it is gray.
6385 SlowPathCode* slow_path =
6386 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6387 AddSlowPath(slow_path);
6388
6389 // if (rb_state == ReadBarrier::gray_ptr_)
6390 // ref = ReadBarrier::Mark(ref);
6391 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6392 __ j(kEqual, slow_path->GetEntryLabel());
6393 __ Bind(slow_path->GetExitLabel());
6394}
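
// Putting it together, the fast path emitted above looks roughly like this
// (register names are placeholders chosen by the register allocator):
//
//   movl temp, [obj + monitor_offset]   // lock word
//   shrl temp, kReadBarrierStateShift
//   andl temp, kReadBarrierStateMask    // temp = rb_state
//   movl ref,  [src]                    // original reference load
//   cmpl temp, ReadBarrier::gray_ptr_
//   je   <ReadBarrierMarkSlowPathX86_64 entry>   // ref = Mark(ref)
//
// No explicit fence is emitted between the two loads because
// GenerateMemoryBarrier(kLoadAny) is a no-op on x86-64.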
6395
6396void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6397 Location out,
6398 Location ref,
6399 Location obj,
6400 uint32_t offset,
6401 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006402 DCHECK(kEmitCompilerReadBarrier);
6403
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006404 // Insert a slow path based read barrier *after* the reference load.
6405 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006406 // If heap poisoning is enabled, the unpoisoning of the loaded
6407 // reference will be carried out by the runtime within the slow
6408 // path.
6409 //
6410 // Note that `ref` currently does not get unpoisoned (when heap
6411 // poisoning is enabled), which is alright as the `ref` argument is
6412 // not used by the artReadBarrierSlow entry point.
6413 //
6414 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6415 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6416 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6417 AddSlowPath(slow_path);
6418
Roland Levillain0d5a2812015-11-13 10:07:31 +00006419 __ jmp(slow_path->GetEntryLabel());
6420 __ Bind(slow_path->GetExitLabel());
6421}
6422
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006423void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6424 Location out,
6425 Location ref,
6426 Location obj,
6427 uint32_t offset,
6428 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006429 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006430 // Baker's read barriers shall be handled by the fast path
6431 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6432 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006433 // If heap poisoning is enabled, unpoisoning will be taken care of
6434 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006435 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006436 } else if (kPoisonHeapReferences) {
6437 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6438 }
6439}
6440
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006441void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6442 Location out,
6443 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006444 DCHECK(kEmitCompilerReadBarrier);
6445
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006446 // Insert a slow path based read barrier *after* the GC root load.
6447 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006448 // Note that GC roots are not affected by heap poisoning, so we do
6449 // not need to do anything special for this here.
6450 SlowPathCode* slow_path =
6451 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6452 AddSlowPath(slow_path);
6453
Roland Levillain0d5a2812015-11-13 10:07:31 +00006454 __ jmp(slow_path->GetEntryLabel());
6455 __ Bind(slow_path->GetExitLabel());
6456}
6457
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006458void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006459 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006460 LOG(FATAL) << "Unreachable";
6461}
6462
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006463void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006464 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006465 LOG(FATAL) << "Unreachable";
6466}
6467
Mark Mendellfe57faa2015-09-18 09:26:15 -04006468// Simple implementation of packed switch - generate cascaded compare/jumps.
6469void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6470 LocationSummary* locations =
6471 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6472 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006473 locations->AddTemp(Location::RequiresRegister());
6474 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006475}
6476
6477void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6478 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006479 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006480 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006481 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6482 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6483 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006484 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6485
6486 // Should we generate smaller inline compare/jumps?
6487 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6488 // Figure out the correct compare values and jump conditions.
6489 // Handle the first compare/branch as a special case because it might
6490 // jump to the default case.
6491 DCHECK_GT(num_entries, 2u);
6492 Condition first_condition;
6493 uint32_t index;
6494 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6495 if (lower_bound != 0) {
6496 first_condition = kLess;
6497 __ cmpl(value_reg_in, Immediate(lower_bound));
6498 __ j(first_condition, codegen_->GetLabelOf(default_block));
6499 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6500
6501 index = 1;
6502 } else {
6503 // Handle all the compare/jumps below.
6504 first_condition = kBelow;
6505 index = 0;
6506 }
6507
6508 // Handle the rest of the compare/jumps.
6509 for (; index + 1 < num_entries; index += 2) {
6510 int32_t compare_to_value = lower_bound + index + 1;
6511 __ cmpl(value_reg_in, Immediate(compare_to_value));
6512 // Jump to successors[index] if value < case_value[index].
6513 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6514 // Jump to successors[index + 1] if value == case_value[index + 1].
6515 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6516 }
6517
6518 if (index != num_entries) {
6519      // There is an odd number of entries. Handle the last one.
6520 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00006521 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006522 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6523 }
6524
6525 // And the default for any other value.
6526 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6527 __ jmp(codegen_->GetLabelOf(default_block));
6528 }
6529 return;
6530 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006531
6532 // Remove the bias, if needed.
6533 Register value_reg_out = value_reg_in.AsRegister();
6534 if (lower_bound != 0) {
6535 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6536 value_reg_out = temp_reg.AsRegister();
6537 }
6538 CpuRegister value_reg(value_reg_out);
6539
6540 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006541 __ cmpl(value_reg, Immediate(num_entries - 1));
6542 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006543
Mark Mendell9c86b482015-09-18 13:36:07 -04006544 // We are in the range of the table.
6545 // Load the address of the jump table in the constant area.
6546 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006547
Mark Mendell9c86b482015-09-18 13:36:07 -04006548 // Load the (signed) offset from the jump table.
6549 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6550
6551 // Add the offset to the address of the table base.
6552 __ addq(temp_reg, base_reg);
6553
6554 // And jump.
6555 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006556}
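
// As a concrete sketch, a packed switch starting at 0 with three cases stays
// below the jump-table threshold and compiles to cascaded compares:
//
//   cmpl value, 1
//   jb   <case 0>       ; value < 1
//   je   <case 1>
//   cmpl value, 2
//   je   <case 2>
//   jmp  <default>      ; omitted when the default block falls through
//
// Larger switches take the jump-table path above instead: bias the value,
// bounds-check it, fetch a signed 32-bit offset from the RIP-addressed jump
// table in the constant area, add the table base, and jump through the
// register.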
6557
Aart Bikc5d47542016-01-27 17:00:35 -08006558void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6559 if (value == 0) {
6560 __ xorl(dest, dest);
6561 } else {
6562 __ movl(dest, Immediate(value));
6563 }
6564}
6565
Mark Mendell92e83bf2015-05-07 11:25:03 -04006566void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6567 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006568 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006569 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006570 } else if (IsUint<32>(value)) {
6571     // We can use a 32-bit move, as it will zero-extend to 64 bits and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006572 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6573 } else {
6574 __ movq(dest, Immediate(value));
6575 }
6576}
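
// For instance (sketch): Load64BitValue picks the shortest encoding per
// value: 0 becomes xorl (which also clears the upper half), a value such as
// 0x00000000FFFFFFFF becomes a zero-extending movl, and anything with bits
// set in the upper half, e.g. -1, needs the full movq with a 64-bit
// immediate.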
6577
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006578void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6579 if (value == 0) {
6580 __ xorps(dest, dest);
6581 } else {
6582 __ movss(dest, LiteralInt32Address(value));
6583 }
6584}
6585
6586void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6587 if (value == 0) {
6588 __ xorpd(dest, dest);
6589 } else {
6590 __ movsd(dest, LiteralInt64Address(value));
6591 }
6592}
6593
6594void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6595 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6596}
6597
6598void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6599 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6600}
6601
Aart Bika19616e2016-02-01 18:57:58 -08006602void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6603 if (value == 0) {
6604 __ testl(dest, dest);
6605 } else {
6606 __ cmpl(dest, Immediate(value));
6607 }
6608}
6609
6610void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6611 if (IsInt<32>(value)) {
6612 if (value == 0) {
6613 __ testq(dest, dest);
6614 } else {
6615 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6616 }
6617 } else {
6618 // Value won't fit in an int.
6619 __ cmpq(dest, LiteralInt64Address(value));
6620 }
6621}
6622
Mark Mendellcfa410b2015-05-25 16:02:44 -04006623void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6624 DCHECK(dest.IsDoubleStackSlot());
6625 if (IsInt<32>(value)) {
6626 // Can move directly as an int32 constant.
6627 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6628 Immediate(static_cast<int32_t>(value)));
6629 } else {
6630 Load64BitValue(CpuRegister(TMP), value);
6631 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6632 }
6633}
6634
Mark Mendell9c86b482015-09-18 13:36:07 -04006635/**
6636 * Class to handle late fixup of offsets into the constant area.
6637 */
6638class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6639 public:
6640 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6641 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6642
6643 protected:
6644 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6645
6646 CodeGeneratorX86_64* codegen_;
6647
6648 private:
6649 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6650 // Patch the correct offset for the instruction. We use the address of the
6651 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6652 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6653 int32_t relative_position = constant_offset - pos;
6654
6655 // Patch in the right value.
6656 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6657 }
6658
6659 // Location in constant area that the fixup refers to.
6660 size_t offset_into_constant_area_;
6661};
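
// Worked example of Process() with hypothetical numbers: if the constant
// area starts at code offset 1000, this literal sits 16 bytes into it, and
// the next instruction begins at pos == 900, then
//   relative_position = (1000 + 16) - 900 = 116
// is stored into the 4 bytes ending at pos, i.e. the RIP-relative
// displacement of the referencing instruction.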
6662
6663/**
6664 * Class to handle late fixup of offsets to a jump table that will be created in the
6665 * constant area.
6666 */
6667class JumpTableRIPFixup : public RIPFixup {
6668 public:
6669 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6670 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6671
6672 void CreateJumpTable() {
6673 X86_64Assembler* assembler = codegen_->GetAssembler();
6674
6675 // Ensure that the reference to the jump table has the correct offset.
6676 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6677 SetOffset(offset_in_constant_table);
6678
6679 // Compute the offset from the start of the function to this jump table.
6680 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6681
6682 // Populate the jump table with the correct values for the jump table.
6683 int32_t num_entries = switch_instr_->GetNumEntries();
6684 HBasicBlock* block = switch_instr_->GetBlock();
6685 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6686 // The value that we want is the target offset - the position of the table.
6687 for (int32_t i = 0; i < num_entries; i++) {
6688 HBasicBlock* b = successors[i];
6689 Label* l = codegen_->GetLabelOf(b);
6690 DCHECK(l->IsBound());
6691 int32_t offset_to_block = l->Position() - current_table_offset;
6692 assembler->AppendInt32(offset_to_block);
6693 }
6694 }
6695
6696 private:
6697 const HPackedSwitch* switch_instr_;
6698};
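
// Sketch with hypothetical offsets: if the table ends up at code offset 2000
// and the three successor blocks are bound at offsets 1500, 1540 and 1580,
// CreateJumpTable() appends the int32 entries -500, -460 and -420. The
// dispatch sequence in VisitPackedSwitch sign-extends an entry and adds it
// back to the table's address before jumping.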
6699
Mark Mendellf55c3e02015-03-26 21:07:46 -04006700void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6701 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006702 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006703 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6704     // Align to a 4-byte boundary to reduce cache misses, as the data is 4- and 8-byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006705 assembler->Align(4, 0);
6706 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006707
6708 // Populate any jump tables.
6709 for (auto jump_table : fixups_to_jump_tables_) {
6710 jump_table->CreateJumpTable();
6711 }
6712
6713 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006714 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006715 }
6716
6717 // And finish up.
6718 CodeGenerator::Finalize(allocator);
6719}
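
// Rough layout of the finalized method (sketch):
//   [generated code][padding to a 4-byte boundary][literal constants][jump tables]
// Every RIP-relative fixup is patched to its final offset into that trailing
// constant area when the assembler finalizes the instructions into the
// output buffer.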
6720
Mark Mendellf55c3e02015-03-26 21:07:46 -04006721Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6722 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6723 return Address::RIP(fixup);
6724}
6725
6726Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6727 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6728 return Address::RIP(fixup);
6729}
6730
6731Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6732 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6733 return Address::RIP(fixup);
6734}
6735
6736Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6737 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6738 return Address::RIP(fixup);
6739}
6740
Andreas Gampe85b62f22015-09-09 13:15:38 -07006741// TODO: trg as memory.
6742void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6743 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006744 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006745 return;
6746 }
6747
6748 DCHECK_NE(type, Primitive::kPrimVoid);
6749
6750 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6751 if (trg.Equals(return_loc)) {
6752 return;
6753 }
6754
6755 // Let the parallel move resolver take care of all of this.
6756 HParallelMove parallel_move(GetGraph()->GetArena());
6757 parallel_move.AddMove(return_loc, trg, type, nullptr);
6758 GetMoveResolver()->EmitNativeCode(&parallel_move);
6759}
6760
Mark Mendell9c86b482015-09-18 13:36:07 -04006761Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6762 // Create a fixup to be used to create and address the jump table.
6763 JumpTableRIPFixup* table_fixup =
6764 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6765
6766 // We have to populate the jump tables.
6767 fixups_to_jump_tables_.push_back(table_fixup);
6768 return Address::RIP(table_fixup);
6769}
6770
Mark Mendellea5af682015-10-22 17:35:49 -04006771void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6772 const Address& addr_high,
6773 int64_t v,
6774 HInstruction* instruction) {
6775 if (IsInt<32>(v)) {
6776 int32_t v_32 = v;
6777 __ movq(addr_low, Immediate(v_32));
6778 MaybeRecordImplicitNullCheck(instruction);
6779 } else {
6780     // Doesn't fit in a sign-extended 32-bit immediate. Store it in two 32-bit pieces.
6781 int32_t low_v = Low32Bits(v);
6782 int32_t high_v = High32Bits(v);
6783 __ movl(addr_low, Immediate(low_v));
6784 MaybeRecordImplicitNullCheck(instruction);
6785 __ movl(addr_high, Immediate(high_v));
6786 }
6787}
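
// Example (sketch): storing 0x0000000100000000 cannot be encoded as a movq
// with a sign-extended 32-bit immediate, so it is split into
//   movl [addr_low], 0x00000000  and  movl [addr_high], 0x00000001
// and only the first store is covered by the implicit null check.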
6788
Roland Levillain4d027112015-07-01 15:41:14 +01006789#undef __
6790
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006791} // namespace x86_64
6792} // namespace art