/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

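// Mask of the C2 condition bit (bit 10) in the x87 FPU status word.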
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

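// Slow path throwing a null pointer exception.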
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

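// Slow path throwing a division-by-zero exception.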
class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

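// Slow path for integer division/remainder when the divisor is -1: the quotient is the
// negated dividend and the remainder is 0, computed inline without an idiv (which would
// raise a fault for the minimum value).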
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

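// Slow path calling the runtime to check for a pending thread suspension request.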
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

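// Slow path throwing an exception for an out-of-bounds array or string index.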
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    x86_64_codegen->InvokeRuntime(entry_point_offset,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

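// Slow path resolving a type through the runtime and, if requested, running its static
// initializer.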
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

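// Slow path resolving a string literal through the runtime.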
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

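// Slow path for instance-of and check-cast, calling the runtime for the non-trivial cases.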
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

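// Slow path calling the runtime to deoptimize into the interpreter.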
class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

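// Slow path for reference array stores, calling the runtime aput-object helper.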
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCode(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT

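// Maps integer condition to x86_64 name.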
inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default:      break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
  RecordPcInfo(instruction, dex_pc, slow_path);
}

static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      constant_area_start_(0),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}

Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001034InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1035 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001036 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001037 assembler_(codegen->GetAssembler()),
1038 codegen_(codegen) {}
1039
David Brazdil58282f42016-01-14 12:45:10 +00001040void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001041 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001042 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001043
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001044 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001045 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001046}
1047
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001048static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001049 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001050}
David Srbecky9d8606d2015-04-12 09:35:32 +01001051
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001052static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001053 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001054}
1055
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001056void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001057 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001058 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001059 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001060 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001061 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001062
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001063 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001064 __ testq(CpuRegister(RAX), Address(
1065 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001066 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001067 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001068
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001069 if (HasEmptyFrame()) {
1070 return;
1071 }
1072
Nicolas Geoffray98893962015-01-21 12:32:32 +00001073 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001074 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001075 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001076 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001077 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1078 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001079 }
1080 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001081
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001082 int adjust = GetFrameSize() - GetCoreSpillSize();
1083 __ subq(CpuRegister(RSP), Immediate(adjust));
1084 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001085 uint32_t xmm_spill_location = GetFpuSpillStart();
1086 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001087
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001088 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1089 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001090 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1091 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1092 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001093 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001094 }
1095
Mathieu Chartiere401d142015-04-22 13:56:20 -07001096 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001097 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001098}
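
// A rough sketch of the frame laid out above (high to low addresses), assuming
// some core and XMM callee-save registers were allocated:
//
//   [caller's frame]
//   return address                      <- CFA - 8
//   saved core callee-saves             <- pushq, one slot each
//   ...                                 <- subq RSP, GetFrameSize() - GetCoreSpillSize()
//   saved XMM callee-saves (movsd)      <- GetFpuSpillStart() + i * slot size
//   spill slots / outgoing arguments
//   current ArtMethod*                  <- [RSP + kCurrentMethodStackOffset]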
1099
1100void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001101 __ cfi().RememberState();
1102 if (!HasEmptyFrame()) {
1103 uint32_t xmm_spill_location = GetFpuSpillStart();
1104 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1105 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1106 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1107 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1108 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1109 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1110 }
1111 }
1112
1113 int adjust = GetFrameSize() - GetCoreSpillSize();
1114 __ addq(CpuRegister(RSP), Immediate(adjust));
1115 __ cfi().AdjustCFAOffset(-adjust);
1116
1117 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1118 Register reg = kCoreCalleeSaves[i];
1119 if (allocated_registers_.ContainsCoreRegister(reg)) {
1120 __ popq(CpuRegister(reg));
1121 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1122 __ cfi().Restore(DWARFReg(reg));
1123 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001124 }
1125 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001126 __ ret();
1127 __ cfi().RestoreState();
1128 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001129}
1130
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001131void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1132 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001133}
1134
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001135void CodeGeneratorX86_64::Move(Location destination, Location source) {
1136 if (source.Equals(destination)) {
1137 return;
1138 }
1139 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001140 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001141 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001142 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001143 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001144 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001145 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001146 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1147 } else if (source.IsConstant()) {
1148 HConstant* constant = source.GetConstant();
1149 if (constant->IsLongConstant()) {
1150 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1151 } else {
1152 Load32BitValue(dest, GetInt32ValueOf(constant));
1153 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001154 } else {
1155 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001156 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001157 }
1158 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001159 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001160 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001161 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001162 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001163 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1164 } else if (source.IsConstant()) {
1165 HConstant* constant = source.GetConstant();
1166 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1167 if (constant->IsFloatConstant()) {
1168 Load32BitValue(dest, static_cast<int32_t>(value));
1169 } else {
1170 Load64BitValue(dest, value);
1171 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001172 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001173 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001174 } else {
1175 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001176 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001177 }
1178 } else if (destination.IsStackSlot()) {
1179 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001180 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001181 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001182 } else if (source.IsFpuRegister()) {
1183 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001184 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001185 } else if (source.IsConstant()) {
1186 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001187 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001188 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001189 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001190 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001191 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1192 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001193 }
1194 } else {
1195 DCHECK(destination.IsDoubleStackSlot());
1196 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001197 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001198 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001199 } else if (source.IsFpuRegister()) {
1200 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001201 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001202 } else if (source.IsConstant()) {
1203 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001204 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001205 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001206 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001207 } else {
1208 DCHECK(constant->IsLongConstant());
1209 value = constant->AsLongConstant()->GetValue();
1210 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001211 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001212 } else {
1213 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001214 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1215 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001216 }
1217 }
1218}
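
// A few illustrative cases handled by Move above (a sketch, not emitted here):
//   Move(RAX, long constant 1)            -> Load64BitValue(RAX, 1)
//   Move(stack slot, CPU register)        -> movl [RSP + dst], reg
//   Move(double stack slot, 2.0 constant) -> Store64BitValueToStack(dst, bit_cast<int64_t>(2.0))
//   Move(stack slot, stack slot)          -> movl TMP, [RSP + src]; movl [RSP + dst], TMP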
1219
Calin Juravle175dc732015-08-25 15:42:32 +01001220void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1221 DCHECK(location.IsRegister());
1222 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1223}
1224
Calin Juravlee460d1d2015-09-29 04:52:17 +01001225void CodeGeneratorX86_64::MoveLocation(
1226 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1227 Move(dst, src);
1228}
1229
1230void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1231 if (location.IsRegister()) {
1232 locations->AddTemp(location);
1233 } else {
1234 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1235 }
1236}
1237
David Brazdilfc6a86a2015-06-26 10:33:45 +00001238void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001239 DCHECK(!successor->IsExitBlock());
1240
1241 HBasicBlock* block = got->GetBlock();
1242 HInstruction* previous = got->GetPrevious();
1243
1244 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001245 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001246 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1247 return;
1248 }
1249
1250 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1251 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1252 }
1253 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001254 __ jmp(codegen_->GetLabelOf(successor));
1255 }
1256}
1257
David Brazdilfc6a86a2015-06-26 10:33:45 +00001258void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1259 got->SetLocations(nullptr);
1260}
1261
1262void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1263 HandleGoto(got, got->GetSuccessor());
1264}
1265
1266void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1267 try_boundary->SetLocations(nullptr);
1268}
1269
1270void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1271 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1272 if (!successor->IsExitBlock()) {
1273 HandleGoto(try_boundary, successor);
1274 }
1275}
1276
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001277void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1278 exit->SetLocations(nullptr);
1279}
1280
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001281void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001282}
1283
Mark Mendell152408f2015-12-31 12:28:50 -05001284template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001285void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001286 LabelType* true_label,
1287 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001288 if (cond->IsFPConditionTrueIfNaN()) {
1289 __ j(kUnordered, true_label);
1290 } else if (cond->IsFPConditionFalseIfNaN()) {
1291 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001292 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001293 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001294}
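
// ucomiss/ucomisd report "unordered" when either input is NaN, so GenerateFPJumps
// first routes the NaN case to whichever target the condition demands before
// taking the ordinary X86_64FPCondition jump. For illustration (a sketch of the
// intent only):
//   x != y  is true  if x or y is NaN  -> j(kUnordered, true_label)
//   x == y  is false if x or y is NaN  -> j(kUnordered, false_label)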
1295
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001296void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001297 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001298
Mark Mendellc4701932015-04-10 13:18:51 -04001299 Location left = locations->InAt(0);
1300 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001301 Primitive::Type type = condition->InputAt(0)->GetType();
1302 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001303 case Primitive::kPrimBoolean:
1304 case Primitive::kPrimByte:
1305 case Primitive::kPrimChar:
1306 case Primitive::kPrimShort:
1307 case Primitive::kPrimInt:
1308 case Primitive::kPrimNot: {
1309 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1310 if (right.IsConstant()) {
1311 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1312 if (value == 0) {
1313 __ testl(left_reg, left_reg);
1314 } else {
1315 __ cmpl(left_reg, Immediate(value));
1316 }
1317 } else if (right.IsStackSlot()) {
1318 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1319 } else {
1320 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1321 }
1322 break;
1323 }
Mark Mendellc4701932015-04-10 13:18:51 -04001324 case Primitive::kPrimLong: {
1325 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1326 if (right.IsConstant()) {
1327 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001328 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001329 } else if (right.IsDoubleStackSlot()) {
1330 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1331 } else {
1332 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1333 }
Mark Mendellc4701932015-04-10 13:18:51 -04001334 break;
1335 }
1336 case Primitive::kPrimFloat: {
1337 if (right.IsFpuRegister()) {
1338 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1339 } else if (right.IsConstant()) {
1340 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1341 codegen_->LiteralFloatAddress(
1342 right.GetConstant()->AsFloatConstant()->GetValue()));
1343 } else {
1344 DCHECK(right.IsStackSlot());
1345 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1346 Address(CpuRegister(RSP), right.GetStackIndex()));
1347 }
Mark Mendellc4701932015-04-10 13:18:51 -04001348 break;
1349 }
1350 case Primitive::kPrimDouble: {
1351 if (right.IsFpuRegister()) {
1352 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1353 } else if (right.IsConstant()) {
1354 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1355 codegen_->LiteralDoubleAddress(
1356 right.GetConstant()->AsDoubleConstant()->GetValue()));
1357 } else {
1358 DCHECK(right.IsDoubleStackSlot());
1359 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1360 Address(CpuRegister(RSP), right.GetStackIndex()));
1361 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001362 break;
1363 }
1364 default:
1365 LOG(FATAL) << "Unexpected condition type " << type;
1366 }
1367}
1368
1369template<class LabelType>
1370void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1371 LabelType* true_target_in,
1372 LabelType* false_target_in) {
1373 // Generated branching requires both targets to be explicit. If either of the
1374 // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1375 LabelType fallthrough_target;
1376 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1377 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1378
1379 // Generate the comparison to set the CC.
1380 GenerateCompareTest(condition);
1381
1382 // Now generate the correct jump(s).
1383 Primitive::Type type = condition->InputAt(0)->GetType();
1384 switch (type) {
1385 case Primitive::kPrimLong: {
1386 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1387 break;
1388 }
1389 case Primitive::kPrimFloat: {
1390 GenerateFPJumps(condition, true_target, false_target);
1391 break;
1392 }
1393 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001394 GenerateFPJumps(condition, true_target, false_target);
1395 break;
1396 }
1397 default:
1398 LOG(FATAL) << "Unexpected condition type " << type;
1399 }
1400
David Brazdil0debae72015-11-12 18:37:00 +00001401 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001402 __ jmp(false_target);
1403 }
David Brazdil0debae72015-11-12 18:37:00 +00001404
1405 if (fallthrough_target.IsLinked()) {
1406 __ Bind(&fallthrough_target);
1407 }
Mark Mendellc4701932015-04-10 13:18:51 -04001408}
1409
David Brazdil0debae72015-11-12 18:37:00 +00001410static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1411 // Moves may affect the eflags register (moving zero uses xorl), so the EFLAGS set
1412 // by `cond` can only be relied upon if `cond` comes immediately before `branch`.
1413 // Materialized FP conditions are also excluded, as their code uses complex branching.
1414 return cond->IsCondition() &&
1415 cond->GetNext() == branch &&
1416 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1417}
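
// Illustrative HIR shape (a sketch): with
//   cond:  Condition(a, b)    immediately followed by    If(cond)
// the compare emitted when materializing `cond` is still live in EFLAGS, so the
// If can branch on it directly. If anything sits in between (e.g. a parallel
// move that zeroes a register with xorl), the flags are clobbered and the
// materialized boolean is re-tested against 0 instead.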
1418
Mark Mendell152408f2015-12-31 12:28:50 -05001419template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001420void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001421 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001422 LabelType* true_target,
1423 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001424 HInstruction* cond = instruction->InputAt(condition_input_index);
1425
1426 if (true_target == nullptr && false_target == nullptr) {
1427 // Nothing to do. The code always falls through.
1428 return;
1429 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001430 // Constant condition, statically compared against "true" (integer value 1).
1431 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001432 if (true_target != nullptr) {
1433 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001434 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001435 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001436 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001437 if (false_target != nullptr) {
1438 __ jmp(false_target);
1439 }
1440 }
1441 return;
1442 }
1443
1444 // The following code generates these patterns:
1445 // (1) true_target == nullptr && false_target != nullptr
1446 // - opposite condition true => branch to false_target
1447 // (2) true_target != nullptr && false_target == nullptr
1448 // - condition true => branch to true_target
1449 // (3) true_target != nullptr && false_target != nullptr
1450 // - condition true => branch to true_target
1451 // - branch to false_target
1452 if (IsBooleanValueOrMaterializedCondition(cond)) {
1453 if (AreEflagsSetFrom(cond, instruction)) {
1454 if (true_target == nullptr) {
1455 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1456 } else {
1457 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1458 }
1459 } else {
1460 // Materialized condition, compare against 0.
1461 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1462 if (lhs.IsRegister()) {
1463 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1464 } else {
1465 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1466 }
1467 if (true_target == nullptr) {
1468 __ j(kEqual, false_target);
1469 } else {
1470 __ j(kNotEqual, true_target);
1471 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001472 }
1473 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001474 // Condition has not been materialized, use its inputs as the
1475 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001476 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001477
David Brazdil0debae72015-11-12 18:37:00 +00001478 // If this is a long or FP comparison that has been folded into
1479 // the HCondition, generate the comparison directly.
1480 Primitive::Type type = condition->InputAt(0)->GetType();
1481 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1482 GenerateCompareTestAndBranch(condition, true_target, false_target);
1483 return;
1484 }
1485
1486 Location lhs = condition->GetLocations()->InAt(0);
1487 Location rhs = condition->GetLocations()->InAt(1);
1488 if (rhs.IsRegister()) {
1489 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1490 } else if (rhs.IsConstant()) {
1491 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001492 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001493 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001494 __ cmpl(lhs.AsRegister<CpuRegister>(),
1495 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1496 }
1497 if (true_target == nullptr) {
1498 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1499 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001500 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001501 }
Dave Allison20dfc792014-06-16 20:44:29 -07001502 }
David Brazdil0debae72015-11-12 18:37:00 +00001503
1504 // If neither branch falls through (case 3), the conditional branch to `true_target`
1505 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1506 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001507 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001508 }
1509}
1510
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001511void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001512 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1513 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001514 locations->SetInAt(0, Location::Any());
1515 }
1516}
1517
1518void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001519 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1520 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1521 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1522 nullptr : codegen_->GetLabelOf(true_successor);
1523 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1524 nullptr : codegen_->GetLabelOf(false_successor);
1525 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001526}
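
// Illustration (sketch): if the false successor is laid out right after this
// block, `false_target` stays nullptr and only the conditional jump to the true
// successor is emitted; the false path simply falls through.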
1527
1528void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1529 LocationSummary* locations = new (GetGraph()->GetArena())
1530 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001531 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001532 locations->SetInAt(0, Location::Any());
1533 }
1534}
1535
1536void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001537 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001538 GenerateTestAndBranch<Label>(deoptimize,
1539 /* condition_input_index */ 0,
1540 slow_path->GetEntryLabel(),
1541 /* false_target */ nullptr);
1542}
1543
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001544static bool SelectCanUseCMOV(HSelect* select) {
1545 // There are no conditional move instructions for XMMs.
1546 if (Primitive::IsFloatingPointType(select->GetType())) {
1547 return false;
1548 }
1549
1550 // A FP condition doesn't generate the single CC that we need.
1551 HInstruction* condition = select->GetCondition();
1552 if (condition->IsCondition() &&
1553 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1554 return false;
1555 }
1556
1557 // We can generate a CMOV for this Select.
1558 return true;
1559}
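
// A hedged summary of the checks above: an int/long/reference Select whose
// condition compares integers can be lowered to a single cmov, e.g.
//   int r = (i < j) ? a : b;
// whereas a float/double-typed Select, or one whose condition compares floats,
// takes the branch-and-move path in VisitSelect below.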
1560
David Brazdil74eb1b22015-12-14 11:44:01 +00001561void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1562 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1563 if (Primitive::IsFloatingPointType(select->GetType())) {
1564 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001565 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001566 } else {
1567 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001568 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001569 if (select->InputAt(1)->IsConstant()) {
1570 locations->SetInAt(1, Location::RequiresRegister());
1571 } else {
1572 locations->SetInAt(1, Location::Any());
1573 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001574 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001575 locations->SetInAt(1, Location::Any());
1576 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001577 }
1578 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1579 locations->SetInAt(2, Location::RequiresRegister());
1580 }
1581 locations->SetOut(Location::SameAsFirstInput());
1582}
1583
1584void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1585 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001586 if (SelectCanUseCMOV(select)) {
1587 // If both the condition and the source types are integer, we can generate
1588 // a CMOV to implement Select.
1589 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001590 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001591 DCHECK(locations->InAt(0).Equals(locations->Out()));
1592
1593 HInstruction* select_condition = select->GetCondition();
1594 Condition cond = kNotEqual;
1595
1596 // Figure out how to test the 'condition'.
1597 if (select_condition->IsCondition()) {
1598 HCondition* condition = select_condition->AsCondition();
1599 if (!condition->IsEmittedAtUseSite()) {
1600 // This was a previously materialized condition.
1601 // Can we use the existing condition code?
1602 if (AreEflagsSetFrom(condition, select)) {
1603 // Materialization was the previous instruction. Condition codes are right.
1604 cond = X86_64IntegerCondition(condition->GetCondition());
1605 } else {
1606 // No, we have to recreate the condition code.
1607 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1608 __ testl(cond_reg, cond_reg);
1609 }
1610 } else {
1611 GenerateCompareTest(condition);
1612 cond = X86_64IntegerCondition(condition->GetCondition());
1613 }
1614 } else {
1615 // Must be a boolean condition, which needs to be compared to 0.
1616 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1617 __ testl(cond_reg, cond_reg);
1618 }
1619
1620 // If the condition is true, overwrite the output, which already contains false.
1621 // Generate the correct sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001622 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1623 if (value_true_loc.IsRegister()) {
1624 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1625 } else {
1626 __ cmov(cond,
1627 value_false,
1628 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1629 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001630 } else {
1631 NearLabel false_target;
1632 GenerateTestAndBranch<NearLabel>(select,
1633 /* condition_input_index */ 2,
1634 /* true_target */ nullptr,
1635 &false_target);
1636 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1637 __ Bind(&false_target);
1638 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001639}
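
// A rough sketch of the CMOV path above for `int r = (i < j) ? a : b;`
// (register names purely illustrative; `out` already holds b because the output
// is constrained to the first input):
//   cmpl  i_reg, j_reg     // or the flags are reused if they are still live
//   cmovl out, a_reg       // overwrite the false value with a when i < j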
1640
David Srbecky0cf44932015-12-09 14:09:59 +00001641void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1642 new (GetGraph()->GetArena()) LocationSummary(info);
1643}
1644
David Srbeckyd28f4a02016-03-14 17:14:24 +00001645void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1646 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001647}
1648
1649void CodeGeneratorX86_64::GenerateNop() {
1650 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001651}
1652
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001653void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001654 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001655 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001656 // Handle the long/FP comparisons made in instruction simplification.
1657 switch (cond->InputAt(0)->GetType()) {
1658 case Primitive::kPrimLong:
1659 locations->SetInAt(0, Location::RequiresRegister());
1660 locations->SetInAt(1, Location::Any());
1661 break;
1662 case Primitive::kPrimFloat:
1663 case Primitive::kPrimDouble:
1664 locations->SetInAt(0, Location::RequiresFpuRegister());
1665 locations->SetInAt(1, Location::Any());
1666 break;
1667 default:
1668 locations->SetInAt(0, Location::RequiresRegister());
1669 locations->SetInAt(1, Location::Any());
1670 break;
1671 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001672 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001673 locations->SetOut(Location::RequiresRegister());
1674 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001675}
1676
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001677void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001678 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001679 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001680 }
Mark Mendellc4701932015-04-10 13:18:51 -04001681
1682 LocationSummary* locations = cond->GetLocations();
1683 Location lhs = locations->InAt(0);
1684 Location rhs = locations->InAt(1);
1685 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001686 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001687
1688 switch (cond->InputAt(0)->GetType()) {
1689 default:
1690 // Integer case.
1691
1692 // Clear output register: setcc only sets the low byte.
1693 __ xorl(reg, reg);
1694
1695 if (rhs.IsRegister()) {
1696 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1697 } else if (rhs.IsConstant()) {
1698 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001699 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001700 } else {
1701 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1702 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001703 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001704 return;
1705 case Primitive::kPrimLong:
1706 // Clear output register: setcc only sets the low byte.
1707 __ xorl(reg, reg);
1708
1709 if (rhs.IsRegister()) {
1710 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1711 } else if (rhs.IsConstant()) {
1712 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001713 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001714 } else {
1715 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1716 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001717 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001718 return;
1719 case Primitive::kPrimFloat: {
1720 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1721 if (rhs.IsConstant()) {
1722 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1723 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1724 } else if (rhs.IsStackSlot()) {
1725 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1726 } else {
1727 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1728 }
1729 GenerateFPJumps(cond, &true_label, &false_label);
1730 break;
1731 }
1732 case Primitive::kPrimDouble: {
1733 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1734 if (rhs.IsConstant()) {
1735 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1736 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1737 } else if (rhs.IsDoubleStackSlot()) {
1738 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1739 } else {
1740 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1741 }
1742 GenerateFPJumps(cond, &true_label, &false_label);
1743 break;
1744 }
1745 }
1746
1747 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001748 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001749
Roland Levillain4fa13f62015-07-06 18:11:54 +01001750 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001751 __ Bind(&false_label);
1752 __ xorl(reg, reg);
1753 __ jmp(&done_label);
1754
Roland Levillain4fa13f62015-07-06 18:11:54 +01001755 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001756 __ Bind(&true_label);
1757 __ movl(reg, Immediate(1));
1758 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001759}
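
// Materializing an integer condition therefore looks roughly like (sketch):
//   xorl  reg, reg         // clear first: setcc only writes the low byte
//   cmpl  lhs, rhs
//   setl  reg              // e.g. for a signed less-than
// while FP conditions go through GenerateFPJumps and then write 0 or 1 explicitly.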
1760
1761void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001762 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001763}
1764
1765void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001766 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001767}
1768
1769void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001770 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001771}
1772
1773void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001774 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001775}
1776
1777void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001778 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001779}
1780
1781void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001782 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001783}
1784
1785void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001786 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001787}
1788
1789void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001790 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001791}
1792
1793void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001794 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001795}
1796
1797void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001798 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001799}
1800
1801void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001802 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001803}
1804
1805void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001806 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001807}
1808
Aart Bike9f37602015-10-09 11:15:55 -07001809void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001810 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001811}
1812
1813void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001814 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001815}
1816
1817void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001818 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001819}
1820
1821void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001822 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001823}
1824
1825void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001826 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001827}
1828
1829void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001830 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001831}
1832
1833void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001834 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001835}
1836
1837void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001838 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001839}
1840
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001841void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001842 LocationSummary* locations =
1843 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001844 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001845 case Primitive::kPrimBoolean:
1846 case Primitive::kPrimByte:
1847 case Primitive::kPrimShort:
1848 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001849 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001850 case Primitive::kPrimLong: {
1851 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001852 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001853 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1854 break;
1855 }
1856 case Primitive::kPrimFloat:
1857 case Primitive::kPrimDouble: {
1858 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001859 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001860 locations->SetOut(Location::RequiresRegister());
1861 break;
1862 }
1863 default:
1864 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1865 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001866}
1867
1868void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001869 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001870 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001871 Location left = locations->InAt(0);
1872 Location right = locations->InAt(1);
1873
Mark Mendell0c9497d2015-08-21 09:30:05 -04001874 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001875 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001876 Condition less_cond = kLess;
1877
Calin Juravleddb7df22014-11-25 20:56:51 +00001878 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001879 case Primitive::kPrimBoolean:
1880 case Primitive::kPrimByte:
1881 case Primitive::kPrimShort:
1882 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001883 case Primitive::kPrimInt: {
1884 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1885 if (right.IsConstant()) {
1886 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1887 codegen_->Compare32BitValue(left_reg, value);
1888 } else if (right.IsStackSlot()) {
1889 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1890 } else {
1891 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1892 }
1893 break;
1894 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001895 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001896 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1897 if (right.IsConstant()) {
1898 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001899 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001900 } else if (right.IsDoubleStackSlot()) {
1901 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001902 } else {
1903 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1904 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001905 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001906 }
1907 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001908 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1909 if (right.IsConstant()) {
1910 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1911 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1912 } else if (right.IsStackSlot()) {
1913 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1914 } else {
1915 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1916 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001917 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001918 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001919 break;
1920 }
1921 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001922 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1923 if (right.IsConstant()) {
1924 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1925 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1926 } else if (right.IsDoubleStackSlot()) {
1927 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1928 } else {
1929 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1930 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001931 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001932 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001933 break;
1934 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001935 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001936 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001937 }
Aart Bika19616e2016-02-01 18:57:58 -08001938
Calin Juravleddb7df22014-11-25 20:56:51 +00001939 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001940 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001941 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001942
Calin Juravle91debbc2014-11-26 19:01:09 +00001943 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001944 __ movl(out, Immediate(1));
1945 __ jmp(&done);
1946
1947 __ Bind(&less);
1948 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001949
1950 __ Bind(&done);
1951}
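
// The three-way compare above amounts to (sketch for the integer case):
//   cmpl  left, right
//   movl  out, 0
//   je    done
//   jl    less             // jb for float/double, since ucomis{s,d} sets CF
//   movl  out, 1           // greater
//   jmp   done
// less:
//   movl  out, -1
// done: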
1952
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001953void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001954 LocationSummary* locations =
1955 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001956 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001957}
1958
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001959void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001960 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001961}
1962
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001963void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1964 LocationSummary* locations =
1965 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1966 locations->SetOut(Location::ConstantLocation(constant));
1967}
1968
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001969void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001970 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001971}
1972
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001973void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001974 LocationSummary* locations =
1975 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001976 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001977}
1978
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001979void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001980 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001981}
1982
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001983void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
1984 LocationSummary* locations =
1985 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1986 locations->SetOut(Location::ConstantLocation(constant));
1987}
1988
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001989void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001990 // Will be generated at use site.
1991}
1992
1993void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
1994 LocationSummary* locations =
1995 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1996 locations->SetOut(Location::ConstantLocation(constant));
1997}
1998
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001999void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
2000 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002001 // Will be generated at use site.
2002}
2003
Calin Juravle27df7582015-04-17 19:12:31 +01002004void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2005 memory_barrier->SetLocations(nullptr);
2006}
2007
2008void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002009 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002010}
2011
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002012void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2013 ret->SetLocations(nullptr);
2014}
2015
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002016void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002017 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002018}
2019
2020void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002021 LocationSummary* locations =
2022 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002023 switch (ret->InputAt(0)->GetType()) {
2024 case Primitive::kPrimBoolean:
2025 case Primitive::kPrimByte:
2026 case Primitive::kPrimChar:
2027 case Primitive::kPrimShort:
2028 case Primitive::kPrimInt:
2029 case Primitive::kPrimNot:
2030 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002031 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002032 break;
2033
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002034 case Primitive::kPrimFloat:
2035 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002036 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002037 break;
2038
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002039 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002040 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002041 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002042}
2043
2044void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2045 if (kIsDebugBuild) {
2046 switch (ret->InputAt(0)->GetType()) {
2047 case Primitive::kPrimBoolean:
2048 case Primitive::kPrimByte:
2049 case Primitive::kPrimChar:
2050 case Primitive::kPrimShort:
2051 case Primitive::kPrimInt:
2052 case Primitive::kPrimNot:
2053 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002054 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002055 break;
2056
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002057 case Primitive::kPrimFloat:
2058 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002059 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002060 XMM0);
2061 break;
2062
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002063 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002064 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002065 }
2066 }
2067 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002068}
2069
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002070Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2071 switch (type) {
2072 case Primitive::kPrimBoolean:
2073 case Primitive::kPrimByte:
2074 case Primitive::kPrimChar:
2075 case Primitive::kPrimShort:
2076 case Primitive::kPrimInt:
2077 case Primitive::kPrimNot:
2078 case Primitive::kPrimLong:
2079 return Location::RegisterLocation(RAX);
2080
2081 case Primitive::kPrimVoid:
2082 return Location::NoLocation();
2083
2084 case Primitive::kPrimDouble:
2085 case Primitive::kPrimFloat:
2086 return Location::FpuRegisterLocation(XMM0);
2087 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002088
2089 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002090}
2091
2092Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2093 return Location::RegisterLocation(kMethodRegisterArgument);
2094}
2095
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}

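// Illustrative note (not part of the original file): for a hypothetical static method
// taking (int, long, float, double), the visitor above hands out, in order, the first
// two general-purpose argument registers of the managed calling convention for the int
// and the long, and the first two FP argument registers for the float and the double;
// assuming the usual ART x86-64 managed convention this would be roughly
//   int    -> RSI        long   -> RDX
//   float  -> XMM0       double -> XMM1
// and only once GetNumberOfRegisters()/GetNumberOfFpuRegisters() is exhausted do
// arguments spill to stack slots derived from stack_index_. The concrete register
// names come from InvokeDexCallingConvention, not from this function, so treat the
// table above as an assumption for exposition only.
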
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}

void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. This is safe to do here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (though
  // it may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp,
      ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

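// Illustrative shape of the sequence emitted above for an interface call
// (pseudo-assembly for exposition only; exact offsets depend on the ImTable
// layout, pointer size and heap poisoning configuration):
//   movq rax,  <dex method index>          // hidden argument for conflict resolution
//   movl temp, [receiver + klass_]         // load the receiver's class
//   movq temp, [temp + imt_ptr_offset]     // load the class's ImTable pointer
//   movq temp, [temp + imt_entry_offset]   // load the ArtMethod* at the IMT index
//   call [temp + quick_entry_point_offset] // jump to its compiled-code entry point
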
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimFloat: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

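// Illustrative sketch of the FP negation trick used above, written as plain C++
// for exposition (it is not code this generator emits). With IEEE-754 layout,
// XOR-ing the bit pattern with the sign mask flips only the sign bit, leaving
// exponent and mantissa untouched, which is why a single xorps/xorpd against a
// constant-pool mask suffices and no general-purpose register is needed:
//   uint32_t bits;
//   std::memcpy(&bits, &f, sizeof(bits));   // reinterpret float as raw bits
//   bits ^= 0x80000000u;                    // flip the sign bit only
//   std::memcpy(&f, &bits, sizeof(bits));   // f is now -f (including for NaN/inf)
// The double-precision case is identical with the 64-bit mask 0x8000000000000000.
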
void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The Java language does not allow treating boolean as an integral type but
  // our bit representation makes it safe.

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          // TODO: We would benefit from a (to-be-implemented)
          // Location::RegisterOrStackSlot requirement for this input.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          if (in.IsRegister()) {
            __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
            __ movsxb(out.AsRegister<CpuRegister>(),
                      Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            __ movl(out.AsRegister<CpuRegister>(),
                    Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          if (in.IsRegister()) {
            __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
            __ movsxw(out.AsRegister<CpuRegister>(),
                      Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            __ movl(out.AsRegister<CpuRegister>(),
                    Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          if (in.IsRegister()) {
            __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsDoubleStackSlot()) {
            __ movl(out.AsRegister<CpuRegister>(),
                    Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          __ movl(output, Immediate(kPrimIntMax));
          // if input >= (float)INT_MAX goto done
          __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = float-to-int-truncate(input)
          __ cvttss2si(output, input, false);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          __ movl(output, Immediate(kPrimIntMax));
          // if input >= (double)INT_MAX goto done
          __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = double-to-int-truncate(input)
          __ cvttsd2si(output, input);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      DCHECK(out.IsRegister());
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(in.IsRegister());
          __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          codegen_->Load64BitValue(output, kPrimLongMax);
          // if input >= (float)LONG_MAX goto done
          __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = float-to-long-truncate(input)
          __ cvttss2si(output, input, true);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          codegen_->Load64BitValue(output, kPrimLongMax);
          // if input >= (double)LONG_MAX goto done
          __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = double-to-long-truncate(input)
          __ cvttsd2si(output, input, true);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          if (in.IsRegister()) {
            __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
            __ movzxw(out.AsRegister<CpuRegister>(),
                      Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            __ movl(out.AsRegister<CpuRegister>(),
                    Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          if (in.IsRegister()) {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
          } else if (in.IsConstant()) {
            int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load32BitValue(dest, static_cast<float>(v));
          } else {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), false);
          }
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          if (in.IsRegister()) {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
          } else if (in.IsConstant()) {
            int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load32BitValue(dest, static_cast<float>(v));
          } else {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), true);
          }
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          if (in.IsFpuRegister()) {
            __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
          } else if (in.IsConstant()) {
            double v = in.GetConstant()->AsDoubleConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load32BitValue(dest, static_cast<float>(v));
          } else {
            __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          if (in.IsRegister()) {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
          } else if (in.IsConstant()) {
            int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load64BitValue(dest, static_cast<double>(v));
          } else {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), false);
          }
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          if (in.IsRegister()) {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
          } else if (in.IsConstant()) {
            int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load64BitValue(dest, static_cast<double>(v));
          } else {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), true);
          }
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          if (in.IsFpuRegister()) {
            __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
          } else if (in.IsConstant()) {
            float v = in.GetConstant()->AsFloatConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load64BitValue(dest, static_cast<double>(v));
          } else {
            __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

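// Illustrative summary of the FP-to-integral paths above (exposition only): a bare
// cvttss2si/cvttsd2si would yield the x86 "integer indefinite" value on positive
// overflow or NaN, which does not match Java semantics, so the code pre-loads the
// maximum value, keeps it when the input compares >= that bound, forces 0 on the
// unordered (NaN) branch, and otherwise truncates toward zero; negative overflow
// already produces the minimum value from the truncating instruction itself.
// A hypothetical Java-level view of the required results:
//   (int)  2e10f     == Integer.MAX_VALUE   // clamped, not wrapped
//   (int)  Float.NaN == 0                   // NaN maps to zero
//   (long) -1.5      == -1                  // plain truncation toward zero
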
void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // We can use a leaq or addq if the constant can fit in an immediate.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

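// Illustrative note on the integer add lowering above: when the output register
// differs from both inputs, leal/leaq serves as a three-operand, non-destructive
// add, roughly
//   lea out, [first + second]   // out = first + second, neither input clobbered
// whereas addl/addq always writes over one of its operands. This is what lets the
// kNoOutputOverlap location requests above avoid an extra register-to-register
// move when the allocator cannot reuse an input for the result.
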
void LocationsBuilderX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}

void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}

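// Illustrative note on the 64-bit subtraction path above: x86-64 arithmetic
// immediates are at most 32 bits wide and are sign-extended to 64 bits, hence
// the IsInt<32>(value) check before emitting subq with an immediate. A constant
// such as 0x100000000 does not satisfy it, so the RegisterOrInt32Constant
// location request in the builder forces such operands into a register instead.
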
Calin Juravle34bacdf2014-10-07 20:23:36 +01003091void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3092 LocationSummary* locations =
3093 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3094 switch (mul->GetResultType()) {
3095 case Primitive::kPrimInt: {
3096 locations->SetInAt(0, Location::RequiresRegister());
3097 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003098 if (mul->InputAt(1)->IsIntConstant()) {
3099 // Can use 3 operand multiply.
3100 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3101 } else {
3102 locations->SetOut(Location::SameAsFirstInput());
3103 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003104 break;
3105 }
3106 case Primitive::kPrimLong: {
3107 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003108 locations->SetInAt(1, Location::Any());
3109 if (mul->InputAt(1)->IsLongConstant() &&
3110 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003111 // Can use 3 operand multiply.
3112 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3113 } else {
3114 locations->SetOut(Location::SameAsFirstInput());
3115 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003116 break;
3117 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003118 case Primitive::kPrimFloat:
3119 case Primitive::kPrimDouble: {
3120 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003121 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003122 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003123 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003124 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003125
3126 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003127 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003128 }
3129}
3130
3131void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3132 LocationSummary* locations = mul->GetLocations();
3133 Location first = locations->InAt(0);
3134 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003135 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003136 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003137 case Primitive::kPrimInt:
3138 // The constant may have ended up in a register, so test explicitly to avoid
3139 // problems where the output may not be the same as the first operand.
3140 if (mul->InputAt(1)->IsIntConstant()) {
3141 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3142 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3143 } else if (second.IsRegister()) {
3144 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003145 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003146 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003147 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003148 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003149 __ imull(first.AsRegister<CpuRegister>(),
3150 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003151 }
3152 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003153 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003154 // The constant may have ended up in a register, so test explicitly to avoid
3155 // problems where the output may not be the same as the first operand.
3156 if (mul->InputAt(1)->IsLongConstant()) {
3157 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3158 if (IsInt<32>(value)) {
3159 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3160 Immediate(static_cast<int32_t>(value)));
3161 } else {
3162 // Have to use the constant area.
3163 DCHECK(first.Equals(out));
3164 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3165 }
3166 } else if (second.IsRegister()) {
3167 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003168 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003169 } else {
3170 DCHECK(second.IsDoubleStackSlot());
3171 DCHECK(first.Equals(out));
3172 __ imulq(first.AsRegister<CpuRegister>(),
3173 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003174 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003175 break;
3176 }
3177
Calin Juravleb5bfa962014-10-21 18:02:24 +01003178 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003179 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003180 if (second.IsFpuRegister()) {
3181 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3182 } else if (second.IsConstant()) {
3183 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003184 codegen_->LiteralFloatAddress(
3185 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003186 } else {
3187 DCHECK(second.IsStackSlot());
3188 __ mulss(first.AsFpuRegister<XmmRegister>(),
3189 Address(CpuRegister(RSP), second.GetStackIndex()));
3190 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003191 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003192 }
3193
3194 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003195 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003196 if (second.IsFpuRegister()) {
3197 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3198 } else if (second.IsConstant()) {
3199 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003200 codegen_->LiteralDoubleAddress(
3201 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003202 } else {
3203 DCHECK(second.IsDoubleStackSlot());
3204 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3205 Address(CpuRegister(RSP), second.GetStackIndex()));
3206 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003207 break;
3208 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003209
3210 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003211 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003212 }
3213}
3214
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003215void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3216 uint32_t stack_adjustment, bool is_float) {
3217 if (source.IsStackSlot()) {
3218 DCHECK(is_float);
3219 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3220 } else if (source.IsDoubleStackSlot()) {
3221 DCHECK(!is_float);
3222 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3223 } else {
3224 // Write the value to the temporary location on the stack and load to FP stack.
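// Explanatory note (not in the original source): there is no direct move
// between an XMM register and the x87 stack, so a register source is first
// spilled to the scratch slot and then loaded with flds/fldl.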
3225 if (is_float) {
3226 Location stack_temp = Location::StackSlot(temp_offset);
3227 codegen_->Move(stack_temp, source);
3228 __ flds(Address(CpuRegister(RSP), temp_offset));
3229 } else {
3230 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3231 codegen_->Move(stack_temp, source);
3232 __ fldl(Address(CpuRegister(RSP), temp_offset));
3233 }
3234 }
3235}
3236
3237void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3238 Primitive::Type type = rem->GetResultType();
3239 bool is_float = type == Primitive::kPrimFloat;
3240 size_t elem_size = Primitive::ComponentSize(type);
3241 LocationSummary* locations = rem->GetLocations();
3242 Location first = locations->InAt(0);
3243 Location second = locations->InAt(1);
3244 Location out = locations->Out();
3245
3246 // Create stack space for 2 elements.
3247 // TODO: enhance register allocator to ask for stack temporaries.
3248 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3249
3250 // Load the values to the FP stack in reverse order, using temporaries if needed.
3251 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3252 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3253
3254 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003255 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003256 __ Bind(&retry);
3257 __ fprem();
3258
3259 // Move FP status to AX.
3260 __ fstsw();
3261
3262 // And see if the argument reduction is complete. This is signaled by the
3263 // C2 FPU flag bit set to 0.
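// Explanatory note (not in the original source): fprem computes only a
// partial remainder, reducing the exponent difference by at most 63 bits per
// iteration; for operands far apart in magnitude it leaves C2 set, which is
// why the instruction is retried here until C2 reads back as zero.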
3264 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3265 __ j(kNotEqual, &retry);
3266
3267 // We have settled on the final value. Retrieve it into an XMM register.
3268 // Store FP top of stack to real stack.
3269 if (is_float) {
3270 __ fsts(Address(CpuRegister(RSP), 0));
3271 } else {
3272 __ fstl(Address(CpuRegister(RSP), 0));
3273 }
3274
3275 // Pop the 2 items from the FP stack.
3276 __ fucompp();
3277
3278 // Load the value from the stack into an XMM register.
3279 DCHECK(out.IsFpuRegister()) << out;
3280 if (is_float) {
3281 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3282 } else {
3283 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3284 }
3285
3286 // And remove the temporary stack space we allocated.
3287 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3288}
3289
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003290void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3291 DCHECK(instruction->IsDiv() || instruction->IsRem());
3292
3293 LocationSummary* locations = instruction->GetLocations();
3294 Location second = locations->InAt(1);
3295 DCHECK(second.IsConstant());
3296
3297 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3298 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003299 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003300
3301 DCHECK(imm == 1 || imm == -1);
3302
3303 switch (instruction->GetResultType()) {
3304 case Primitive::kPrimInt: {
3305 if (instruction->IsRem()) {
3306 __ xorl(output_register, output_register);
3307 } else {
3308 __ movl(output_register, input_register);
3309 if (imm == -1) {
3310 __ negl(output_register);
3311 }
3312 }
3313 break;
3314 }
3315
3316 case Primitive::kPrimLong: {
3317 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003318 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003319 } else {
3320 __ movq(output_register, input_register);
3321 if (imm == -1) {
3322 __ negq(output_register);
3323 }
3324 }
3325 break;
3326 }
3327
3328 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003329 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003330 }
3331}
3332
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003333void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003334 LocationSummary* locations = instruction->GetLocations();
3335 Location second = locations->InAt(1);
3336
3337 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3338 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3339
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003340 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003341 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3342 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003343
3344 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3345
3346 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003347 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003348 __ testl(numerator, numerator);
3349 __ cmov(kGreaterEqual, tmp, numerator);
3350 int shift = CTZ(imm);
3351 __ sarl(tmp, Immediate(shift));
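// Explanatory note (not in the original source): the leal/testl/cmov sequence
// above adds a bias of (abs_imm - 1) only when the numerator is negative, so
// that the arithmetic shift just emitted truncates toward zero as Java
// division requires. E.g. for -7 / 4: (-7 + 3) >> 2 == -1, whereas a plain
// -7 >> 2 would give -2.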
3352
3353 if (imm < 0) {
3354 __ negl(tmp);
3355 }
3356
3357 __ movl(output_register, tmp);
3358 } else {
3359 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3360 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3361
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003362 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003363 __ addq(rdx, numerator);
3364 __ testq(numerator, numerator);
3365 __ cmov(kGreaterEqual, rdx, numerator);
3366 int shift = CTZ(imm);
3367 __ sarq(rdx, Immediate(shift));
3368
3369 if (imm < 0) {
3370 __ negq(rdx);
3371 }
3372
3373 __ movq(output_register, rdx);
3374 }
3375}
3376
3377void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3378 DCHECK(instruction->IsDiv() || instruction->IsRem());
3379
3380 LocationSummary* locations = instruction->GetLocations();
3381 Location second = locations->InAt(1);
3382
3383 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3384 : locations->GetTemp(0).AsRegister<CpuRegister>();
3385 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3386 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3387 : locations->Out().AsRegister<CpuRegister>();
3388 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3389
3390 DCHECK_EQ(RAX, eax.AsRegister());
3391 DCHECK_EQ(RDX, edx.AsRegister());
3392 if (instruction->IsDiv()) {
3393 DCHECK_EQ(RAX, out.AsRegister());
3394 } else {
3395 DCHECK_EQ(RDX, out.AsRegister());
3396 }
3397
3398 int64_t magic;
3399 int shift;
3400
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003401 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003402 if (instruction->GetResultType() == Primitive::kPrimInt) {
3403 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3404
3405 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
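// Explanatory note (not in the original source), sketching the technique
// (Granlund/Montgomery, Hacker's Delight ch. 10): division by a constant d is
// rewritten as a high multiply by magic ~ 2^(32 + shift) / d, an optional
// correction, an arithmetic shift, and the addition of the sign bit so the
// quotient rounds toward zero. The classic constants for d == 7 are
// magic == 0x92492493 (= ceil(2^34 / 7)) and shift == 2; because that magic
// value is negative as an int32 while d > 0, the addl(edx, numerator)
// correction branch below is the one taken.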
3406
3407 __ movl(numerator, eax);
3408
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003409 __ movl(eax, Immediate(magic));
3410 __ imull(numerator);
3411
3412 if (imm > 0 && magic < 0) {
3413 __ addl(edx, numerator);
3414 } else if (imm < 0 && magic > 0) {
3415 __ subl(edx, numerator);
3416 }
3417
3418 if (shift != 0) {
3419 __ sarl(edx, Immediate(shift));
3420 }
3421
3422 __ movl(eax, edx);
3423 __ shrl(edx, Immediate(31));
3424 __ addl(edx, eax);
3425
3426 if (instruction->IsRem()) {
3427 __ movl(eax, numerator);
3428 __ imull(edx, Immediate(imm));
3429 __ subl(eax, edx);
3430 __ movl(edx, eax);
3431 } else {
3432 __ movl(eax, edx);
3433 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003434 } else {
3435 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3436
3437 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3438
3439 CpuRegister rax = eax;
3440 CpuRegister rdx = edx;
3441
3442 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3443
3444 // Save the numerator.
3445 __ movq(numerator, rax);
3446
3447 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003448 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003449
3450 // RDX:RAX = magic * numerator
3451 __ imulq(numerator);
3452
3453 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003454 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003455 __ addq(rdx, numerator);
3456 } else if (imm < 0 && magic > 0) {
3457 // RDX -= numerator
3458 __ subq(rdx, numerator);
3459 }
3460
3461 // Shift if needed.
3462 if (shift != 0) {
3463 __ sarq(rdx, Immediate(shift));
3464 }
3465
3466 // RDX += 1 if RDX < 0
3467 __ movq(rax, rdx);
3468 __ shrq(rdx, Immediate(63));
3469 __ addq(rdx, rax);
3470
3471 if (instruction->IsRem()) {
3472 __ movq(rax, numerator);
3473
3474 if (IsInt<32>(imm)) {
3475 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3476 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003477 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003478 }
3479
3480 __ subq(rax, rdx);
3481 __ movq(rdx, rax);
3482 } else {
3483 __ movq(rax, rdx);
3484 }
3485 }
3486}
3487
Calin Juravlebacfec32014-11-14 15:54:36 +00003488void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3489 DCHECK(instruction->IsDiv() || instruction->IsRem());
3490 Primitive::Type type = instruction->GetResultType();
3491 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3492
3493 bool is_div = instruction->IsDiv();
3494 LocationSummary* locations = instruction->GetLocations();
3495
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003496 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3497 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003498
Roland Levillain271ab9c2014-11-27 15:23:57 +00003499 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003500 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003501
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003502 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003503 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003504
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003505 if (imm == 0) {
3506 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3507 } else if (imm == 1 || imm == -1) {
3508 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003509 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003510 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003511 } else {
3512 DCHECK(imm <= -2 || imm >= 2);
3513 GenerateDivRemWithAnyConstant(instruction);
3514 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003515 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003516 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003517 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003518 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003519 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003520
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003521 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3522 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3523 // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000)
3524 // so it's safe to just use negl instead of more complex comparisons.
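// Explanatory note (not in the original source): this also matches Java
// semantics, where MIN_VALUE / -1 wraps back to MIN_VALUE and any value % -1
// is 0, which is exactly what the negate/zero slow path produces while
// avoiding the #DE fault idiv would raise for MIN_VALUE / -1.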
3525 if (type == Primitive::kPrimInt) {
3526 __ cmpl(second_reg, Immediate(-1));
3527 __ j(kEqual, slow_path->GetEntryLabel());
3528 // edx:eax <- sign-extended of eax
3529 __ cdq();
3530 // eax = quotient, edx = remainder
3531 __ idivl(second_reg);
3532 } else {
3533 __ cmpq(second_reg, Immediate(-1));
3534 __ j(kEqual, slow_path->GetEntryLabel());
3535 // rdx:rax <- sign-extended of rax
3536 __ cqo();
3537 // rax = quotient, rdx = remainder
3538 __ idivq(second_reg);
3539 }
3540 __ Bind(slow_path->GetExitLabel());
3541 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003542}
3543
Calin Juravle7c4954d2014-10-28 16:57:40 +00003544void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3545 LocationSummary* locations =
3546 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3547 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003548 case Primitive::kPrimInt:
3549 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003550 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003551 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003552 locations->SetOut(Location::SameAsFirstInput());
3553 // Intel uses edx:eax as the dividend.
3554 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003555 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3556 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3557 // output and request another temp.
3558 if (div->InputAt(1)->IsConstant()) {
3559 locations->AddTemp(Location::RequiresRegister());
3560 }
Calin Juravled0d48522014-11-04 16:40:20 +00003561 break;
3562 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003563
Calin Juravle7c4954d2014-10-28 16:57:40 +00003564 case Primitive::kPrimFloat:
3565 case Primitive::kPrimDouble: {
3566 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003567 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003568 locations->SetOut(Location::SameAsFirstInput());
3569 break;
3570 }
3571
3572 default:
3573 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3574 }
3575}
3576
3577void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3578 LocationSummary* locations = div->GetLocations();
3579 Location first = locations->InAt(0);
3580 Location second = locations->InAt(1);
3581 DCHECK(first.Equals(locations->Out()));
3582
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003583 Primitive::Type type = div->GetResultType();
3584 switch (type) {
3585 case Primitive::kPrimInt:
3586 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003587 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003588 break;
3589 }
3590
Calin Juravle7c4954d2014-10-28 16:57:40 +00003591 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003592 if (second.IsFpuRegister()) {
3593 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3594 } else if (second.IsConstant()) {
3595 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003596 codegen_->LiteralFloatAddress(
3597 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003598 } else {
3599 DCHECK(second.IsStackSlot());
3600 __ divss(first.AsFpuRegister<XmmRegister>(),
3601 Address(CpuRegister(RSP), second.GetStackIndex()));
3602 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003603 break;
3604 }
3605
3606 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003607 if (second.IsFpuRegister()) {
3608 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3609 } else if (second.IsConstant()) {
3610 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003611 codegen_->LiteralDoubleAddress(
3612 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003613 } else {
3614 DCHECK(second.IsDoubleStackSlot());
3615 __ divsd(first.AsFpuRegister<XmmRegister>(),
3616 Address(CpuRegister(RSP), second.GetStackIndex()));
3617 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003618 break;
3619 }
3620
3621 default:
3622 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3623 }
3624}
3625
Calin Juravlebacfec32014-11-14 15:54:36 +00003626void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003627 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003628 LocationSummary* locations =
3629 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003630
3631 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003632 case Primitive::kPrimInt:
3633 case Primitive::kPrimLong: {
3634 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003635 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003636 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3637 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003638 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3639 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3640 // output and request another temp.
3641 if (rem->InputAt(1)->IsConstant()) {
3642 locations->AddTemp(Location::RequiresRegister());
3643 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003644 break;
3645 }
3646
3647 case Primitive::kPrimFloat:
3648 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003649 locations->SetInAt(0, Location::Any());
3650 locations->SetInAt(1, Location::Any());
3651 locations->SetOut(Location::RequiresFpuRegister());
3652 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003653 break;
3654 }
3655
3656 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003657 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003658 }
3659}
3660
3661void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3662 Primitive::Type type = rem->GetResultType();
3663 switch (type) {
3664 case Primitive::kPrimInt:
3665 case Primitive::kPrimLong: {
3666 GenerateDivRemIntegral(rem);
3667 break;
3668 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003669 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003670 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003671 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003672 break;
3673 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003674 default:
3675 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3676 }
3677}
3678
Calin Juravled0d48522014-11-04 16:40:20 +00003679void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003680 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3681 ? LocationSummary::kCallOnSlowPath
3682 : LocationSummary::kNoCall;
3683 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003684 locations->SetInAt(0, Location::Any());
3685 if (instruction->HasUses()) {
3686 locations->SetOut(Location::SameAsFirstInput());
3687 }
3688}
3689
3690void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003691 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003692 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3693 codegen_->AddSlowPath(slow_path);
3694
3695 LocationSummary* locations = instruction->GetLocations();
3696 Location value = locations->InAt(0);
3697
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003698 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003699 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003700 case Primitive::kPrimByte:
3701 case Primitive::kPrimChar:
3702 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003703 case Primitive::kPrimInt: {
3704 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003705 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003706 __ j(kEqual, slow_path->GetEntryLabel());
3707 } else if (value.IsStackSlot()) {
3708 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3709 __ j(kEqual, slow_path->GetEntryLabel());
3710 } else {
3711 DCHECK(value.IsConstant()) << value;
3712 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3713 __ jmp(slow_path->GetEntryLabel());
3714 }
3715 }
3716 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003717 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003718 case Primitive::kPrimLong: {
3719 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003720 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003721 __ j(kEqual, slow_path->GetEntryLabel());
3722 } else if (value.IsDoubleStackSlot()) {
3723 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3724 __ j(kEqual, slow_path->GetEntryLabel());
3725 } else {
3726 DCHECK(value.IsConstant()) << value;
3727 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3728 __ jmp(slow_path->GetEntryLabel());
3729 }
3730 }
3731 break;
3732 }
3733 default:
3734 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003735 }
Calin Juravled0d48522014-11-04 16:40:20 +00003736}
3737
Calin Juravle9aec02f2014-11-18 23:06:35 +00003738void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3739 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3740
3741 LocationSummary* locations =
3742 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3743
3744 switch (op->GetResultType()) {
3745 case Primitive::kPrimInt:
3746 case Primitive::kPrimLong: {
3747 locations->SetInAt(0, Location::RequiresRegister());
3748 // The shift count needs to be in CL.
3749 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3750 locations->SetOut(Location::SameAsFirstInput());
3751 break;
3752 }
3753 default:
3754 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3755 }
3756}
3757
3758void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3759 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3760
3761 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003762 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003763 Location second = locations->InAt(1);
3764
3765 switch (op->GetResultType()) {
3766 case Primitive::kPrimInt: {
3767 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003768 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003769 if (op->IsShl()) {
3770 __ shll(first_reg, second_reg);
3771 } else if (op->IsShr()) {
3772 __ sarl(first_reg, second_reg);
3773 } else {
3774 __ shrl(first_reg, second_reg);
3775 }
3776 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003777 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003778 if (op->IsShl()) {
3779 __ shll(first_reg, imm);
3780 } else if (op->IsShr()) {
3781 __ sarl(first_reg, imm);
3782 } else {
3783 __ shrl(first_reg, imm);
3784 }
3785 }
3786 break;
3787 }
3788 case Primitive::kPrimLong: {
3789 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003790 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003791 if (op->IsShl()) {
3792 __ shlq(first_reg, second_reg);
3793 } else if (op->IsShr()) {
3794 __ sarq(first_reg, second_reg);
3795 } else {
3796 __ shrq(first_reg, second_reg);
3797 }
3798 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003799 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003800 if (op->IsShl()) {
3801 __ shlq(first_reg, imm);
3802 } else if (op->IsShr()) {
3803 __ sarq(first_reg, imm);
3804 } else {
3805 __ shrq(first_reg, imm);
3806 }
3807 }
3808 break;
3809 }
3810 default:
3811 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003812 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003813 }
3814}
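// Explanatory note (not in the original source): the constant shift count is
// masked with kMaxIntShiftDistance / kMaxLongShiftDistance above because Java
// only uses the low 5 (int) or 6 (long) bits of the shift distance; the
// register case needs no explicit mask, since the hardware applies the same
// masking to the count in CL for 32-bit and 64-bit shifts.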
3815
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003816void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3817 LocationSummary* locations =
3818 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3819
3820 switch (ror->GetResultType()) {
3821 case Primitive::kPrimInt:
3822 case Primitive::kPrimLong: {
3823 locations->SetInAt(0, Location::RequiresRegister());
3824 // The shift count needs to be in CL (unless it is a constant).
3825 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3826 locations->SetOut(Location::SameAsFirstInput());
3827 break;
3828 }
3829 default:
3830 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3831 UNREACHABLE();
3832 }
3833}
3834
3835void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3836 LocationSummary* locations = ror->GetLocations();
3837 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3838 Location second = locations->InAt(1);
3839
3840 switch (ror->GetResultType()) {
3841 case Primitive::kPrimInt:
3842 if (second.IsRegister()) {
3843 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3844 __ rorl(first_reg, second_reg);
3845 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003846 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003847 __ rorl(first_reg, imm);
3848 }
3849 break;
3850 case Primitive::kPrimLong:
3851 if (second.IsRegister()) {
3852 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3853 __ rorq(first_reg, second_reg);
3854 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003855 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003856 __ rorq(first_reg, imm);
3857 }
3858 break;
3859 default:
3860 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3861 UNREACHABLE();
3862 }
3863}
3864
Calin Juravle9aec02f2014-11-18 23:06:35 +00003865void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3866 HandleShift(shl);
3867}
3868
3869void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3870 HandleShift(shl);
3871}
3872
3873void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3874 HandleShift(shr);
3875}
3876
3877void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3878 HandleShift(shr);
3879}
3880
3881void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3882 HandleShift(ushr);
3883}
3884
3885void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3886 HandleShift(ushr);
3887}
3888
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003889void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003890 LocationSummary* locations =
3891 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003892 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003893 if (instruction->IsStringAlloc()) {
3894 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3895 } else {
3896 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3897 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3898 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003899 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003900}
3901
3902void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003903 // Note: if heap poisoning is enabled, the entry point takes care
3904 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003905 if (instruction->IsStringAlloc()) {
3906 // String is allocated through StringFactory. Call NewEmptyString entry point.
3907 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3908 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3909 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3910 __ call(Address(temp, code_offset.SizeValue()));
3911 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3912 } else {
3913 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3914 instruction,
3915 instruction->GetDexPc(),
3916 nullptr);
3917 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3918 DCHECK(!codegen_->IsLeafMethod());
3919 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003920}
3921
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003922void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3923 LocationSummary* locations =
3924 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3925 InvokeRuntimeCallingConvention calling_convention;
3926 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003927 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003928 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003929 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003930}
3931
3932void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3933 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003934 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3935 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003936 // Note: if heap poisoning is enabled, the entry point takes care
3937 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003938 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3939 instruction,
3940 instruction->GetDexPc(),
3941 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003942 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003943
3944 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003945}
3946
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003947void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003948 LocationSummary* locations =
3949 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003950 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3951 if (location.IsStackSlot()) {
3952 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3953 } else if (location.IsDoubleStackSlot()) {
3954 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3955 }
3956 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003957}
3958
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003959void InstructionCodeGeneratorX86_64::VisitParameterValue(
3960 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003961 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003962}
3963
3964void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3965 LocationSummary* locations =
3966 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3967 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3968}
3969
3970void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
3971 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3972 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003973}
3974
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003975void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3976 LocationSummary* locations =
3977 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3978 locations->SetInAt(0, Location::RequiresRegister());
3979 locations->SetOut(Location::RequiresRegister());
3980}
3981
3982void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3983 LocationSummary* locations = instruction->GetLocations();
3984 uint32_t method_offset = 0;
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // The IMT entry must be indexed off the ImTable pointer just loaded into
    // the output register, not off the class in the input register.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
3998
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003999void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004000 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004001 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004002 locations->SetInAt(0, Location::RequiresRegister());
4003 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004004}
4005
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004006void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4007 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004008 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4009 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004010 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004011 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004012 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004013 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004014 break;
4015
4016 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004017 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004018 break;
4019
4020 default:
4021 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4022 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004023}
4024
David Brazdil66d126e2015-04-03 16:02:44 +01004025void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4026 LocationSummary* locations =
4027 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4028 locations->SetInAt(0, Location::RequiresRegister());
4029 locations->SetOut(Location::SameAsFirstInput());
4030}
4031
4032void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004033 LocationSummary* locations = bool_not->GetLocations();
4034 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4035 locations->Out().AsRegister<CpuRegister>().AsRegister());
4036 Location out = locations->Out();
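// Explanatory note (not in the original source): boolean values are
// materialized as 0 or 1, so flipping the low bit with xorl 1 below implements
// logical negation without a compare and branch.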
4037 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4038}
4039
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004040void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004041 LocationSummary* locations =
4042 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004043 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004044 locations->SetInAt(i, Location::Any());
4045 }
4046 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004047}
4048
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004049void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004050 LOG(FATAL) << "Unimplemented";
4051}
4052
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004053void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004054 /*
4055 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004056 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004057 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4058 */
4059 switch (kind) {
4060 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004061 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004062 break;
4063 }
4064 case MemBarrierKind::kAnyStore:
4065 case MemBarrierKind::kLoadAny:
4066 case MemBarrierKind::kStoreStore: {
4067 // nop
4068 break;
4069 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004070 case MemBarrierKind::kNTStoreStore:
4071 // Non-Temporal Store/Store needs an explicit fence.
4072 MemoryFence(/* non-temporal */ true);
4073 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004074 }
4075}
4076
4077void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4078 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4079
Roland Levillain0d5a2812015-11-13 10:07:31 +00004080 bool object_field_get_with_read_barrier =
4081 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004082 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004083 new (GetGraph()->GetArena()) LocationSummary(instruction,
4084 object_field_get_with_read_barrier ?
4085 LocationSummary::kCallOnSlowPath :
4086 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004087 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004088 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4089 locations->SetOut(Location::RequiresFpuRegister());
4090 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004091 // The output overlaps for an object field get when read barriers
4092 // are enabled: we do not want the move to overwrite the object's
4093 // location, as we need it to emit the read barrier.
4094 locations->SetOut(
4095 Location::RequiresRegister(),
4096 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004097 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004098 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4099 // We need a temporary register for the read barrier marking slow
4100 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4101 locations->AddTemp(Location::RequiresRegister());
4102 }
Calin Juravle52c48962014-12-16 17:02:57 +00004103}
4104
4105void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4106 const FieldInfo& field_info) {
4107 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4108
4109 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004110 Location base_loc = locations->InAt(0);
4111 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004112 Location out = locations->Out();
4113 bool is_volatile = field_info.IsVolatile();
4114 Primitive::Type field_type = field_info.GetFieldType();
4115 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4116
4117 switch (field_type) {
4118 case Primitive::kPrimBoolean: {
4119 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4120 break;
4121 }
4122
4123 case Primitive::kPrimByte: {
4124 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4125 break;
4126 }
4127
4128 case Primitive::kPrimShort: {
4129 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4130 break;
4131 }
4132
4133 case Primitive::kPrimChar: {
4134 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4135 break;
4136 }
4137
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004138 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004139 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4140 break;
4141 }
4142
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004143 case Primitive::kPrimNot: {
4144 // /* HeapReference<Object> */ out = *(base + offset)
4145 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4146 Location temp_loc = locations->GetTemp(0);
4147 // Note that a potential implicit null check is handled in this
4148 // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4149 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4150 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4151 if (is_volatile) {
4152 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4153 }
4154 } else {
4155 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4156 codegen_->MaybeRecordImplicitNullCheck(instruction);
4157 if (is_volatile) {
4158 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4159 }
4160 // If read barriers are enabled, emit read barriers other than
4161 // Baker's using a slow path (and also unpoison the loaded
4162 // reference, if heap poisoning is enabled).
4163 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4164 }
4165 break;
4166 }
4167
Calin Juravle52c48962014-12-16 17:02:57 +00004168 case Primitive::kPrimLong: {
4169 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4170 break;
4171 }
4172
4173 case Primitive::kPrimFloat: {
4174 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4175 break;
4176 }
4177
4178 case Primitive::kPrimDouble: {
4179 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4180 break;
4181 }
4182
4183 case Primitive::kPrimVoid:
4184 LOG(FATAL) << "Unreachable type " << field_type;
4185 UNREACHABLE();
4186 }
4187
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004188 if (field_type == Primitive::kPrimNot) {
4189 // Potential implicit null checks, in the case of reference
4190 // fields, are handled in the previous switch statement.
4191 } else {
4192 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004193 }
Roland Levillain4d027112015-07-01 15:41:14 +01004194
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004195 if (is_volatile) {
4196 if (field_type == Primitive::kPrimNot) {
4197 // Memory barriers, in the case of references, are also handled
4198 // in the previous switch statement.
4199 } else {
4200 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4201 }
Roland Levillain4d027112015-07-01 15:41:14 +01004202 }
Calin Juravle52c48962014-12-16 17:02:57 +00004203}
4204
4205void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4206 const FieldInfo& field_info) {
4207 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4208
4209 LocationSummary* locations =
4210 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004211 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004212 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004213 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004214 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004215
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004216 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004217 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004218 if (is_volatile) {
4219 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4220 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4221 } else {
4222 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4223 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004224 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004225 if (is_volatile) {
4226 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4227 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4228 } else {
4229 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4230 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004231 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004232 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004233 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004234 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004235 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004236 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4237 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004238 locations->AddTemp(Location::RequiresRegister());
4239 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004240}
4241
Calin Juravle52c48962014-12-16 17:02:57 +00004242void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004243 const FieldInfo& field_info,
4244 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004245 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4246
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004247 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004248 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4249 Location value = locations->InAt(1);
4250 bool is_volatile = field_info.IsVolatile();
4251 Primitive::Type field_type = field_info.GetFieldType();
4252 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4253
4254 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004255 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004256 }
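// Explanatory note (not in the original source): this follows the JSR-133
// cookbook recipe for a volatile store - an AnyStore (release) barrier before
// the store and an AnyAny barrier after it (emitted at the end of this
// function); on x86-64 only that trailing AnyAny barrier becomes an actual
// fence instruction.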
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004257
Mark Mendellea5af682015-10-22 17:35:49 -04004258 bool maybe_record_implicit_null_check_done = false;
4259
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004260 switch (field_type) {
4261 case Primitive::kPrimBoolean:
4262 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004263 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004264 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004265 __ movb(Address(base, offset), Immediate(v));
4266 } else {
4267 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4268 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004269 break;
4270 }
4271
4272 case Primitive::kPrimShort:
4273 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004274 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004275 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004276 __ movw(Address(base, offset), Immediate(v));
4277 } else {
4278 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4279 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004280 break;
4281 }
4282
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004283 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004284 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004285 if (value.IsConstant()) {
4286 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004287 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4288 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4289 // Note: if heap poisoning is enabled, no need to poison
4290 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004291 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004292 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004293 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4294 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4295 __ movl(temp, value.AsRegister<CpuRegister>());
4296 __ PoisonHeapReference(temp);
4297 __ movl(Address(base, offset), temp);
4298 } else {
4299 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4300 }
Mark Mendell40741f32015-04-20 22:10:34 -04004301 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004302 break;
4303 }
4304
4305 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004306 if (value.IsConstant()) {
4307 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004308 codegen_->MoveInt64ToAddress(Address(base, offset),
4309 Address(base, offset + sizeof(int32_t)),
4310 v,
4311 instruction);
4312 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004313 } else {
4314 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4315 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004316 break;
4317 }
4318
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004319 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004320 if (value.IsConstant()) {
4321 int32_t v =
4322 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4323 __ movl(Address(base, offset), Immediate(v));
4324 } else {
4325 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4326 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004327 break;
4328 }
4329
4330 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004331 if (value.IsConstant()) {
4332 int64_t v =
4333 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4334 codegen_->MoveInt64ToAddress(Address(base, offset),
4335 Address(base, offset + sizeof(int32_t)),
4336 v,
4337 instruction);
4338 maybe_record_implicit_null_check_done = true;
4339 } else {
4340 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4341 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004342 break;
4343 }
4344
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004345 case Primitive::kPrimVoid:
4346 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004347 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004348 }
Calin Juravle52c48962014-12-16 17:02:57 +00004349
Mark Mendellea5af682015-10-22 17:35:49 -04004350 if (!maybe_record_implicit_null_check_done) {
4351 codegen_->MaybeRecordImplicitNullCheck(instruction);
4352 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004353
4354 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4355 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4356 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004357 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004358 }
4359
Calin Juravle52c48962014-12-16 17:02:57 +00004360 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004361 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004362 }
4363}
4364
4365void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4366 HandleFieldSet(instruction, instruction->GetFieldInfo());
4367}
4368
4369void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004370 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004371}
4372
4373void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004374 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004375}
4376
4377void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004378 HandleFieldGet(instruction, instruction->GetFieldInfo());
4379}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004380
Calin Juravle52c48962014-12-16 17:02:57 +00004381void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4382 HandleFieldGet(instruction);
4383}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004384
Calin Juravle52c48962014-12-16 17:02:57 +00004385void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4386 HandleFieldGet(instruction, instruction->GetFieldInfo());
4387}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004388
Calin Juravle52c48962014-12-16 17:02:57 +00004389void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4390 HandleFieldSet(instruction, instruction->GetFieldInfo());
4391}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004392
Calin Juravle52c48962014-12-16 17:02:57 +00004393void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004394 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004395}
4396
Calin Juravlee460d1d2015-09-29 04:52:17 +01004397void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4398 HUnresolvedInstanceFieldGet* instruction) {
4399 FieldAccessCallingConventionX86_64 calling_convention;
4400 codegen_->CreateUnresolvedFieldLocationSummary(
4401 instruction, instruction->GetFieldType(), calling_convention);
4402}
4403
4404void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4405 HUnresolvedInstanceFieldGet* instruction) {
4406 FieldAccessCallingConventionX86_64 calling_convention;
4407 codegen_->GenerateUnresolvedFieldAccess(instruction,
4408 instruction->GetFieldType(),
4409 instruction->GetFieldIndex(),
4410 instruction->GetDexPc(),
4411 calling_convention);
4412}
4413
4414void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4415 HUnresolvedInstanceFieldSet* instruction) {
4416 FieldAccessCallingConventionX86_64 calling_convention;
4417 codegen_->CreateUnresolvedFieldLocationSummary(
4418 instruction, instruction->GetFieldType(), calling_convention);
4419}
4420
4421void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4422 HUnresolvedInstanceFieldSet* instruction) {
4423 FieldAccessCallingConventionX86_64 calling_convention;
4424 codegen_->GenerateUnresolvedFieldAccess(instruction,
4425 instruction->GetFieldType(),
4426 instruction->GetFieldIndex(),
4427 instruction->GetDexPc(),
4428 calling_convention);
4429}
4430
4431void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4432 HUnresolvedStaticFieldGet* instruction) {
4433 FieldAccessCallingConventionX86_64 calling_convention;
4434 codegen_->CreateUnresolvedFieldLocationSummary(
4435 instruction, instruction->GetFieldType(), calling_convention);
4436}
4437
4438void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4439 HUnresolvedStaticFieldGet* instruction) {
4440 FieldAccessCallingConventionX86_64 calling_convention;
4441 codegen_->GenerateUnresolvedFieldAccess(instruction,
4442 instruction->GetFieldType(),
4443 instruction->GetFieldIndex(),
4444 instruction->GetDexPc(),
4445 calling_convention);
4446}
4447
4448void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4449 HUnresolvedStaticFieldSet* instruction) {
4450 FieldAccessCallingConventionX86_64 calling_convention;
4451 codegen_->CreateUnresolvedFieldLocationSummary(
4452 instruction, instruction->GetFieldType(), calling_convention);
4453}
4454
4455void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4456 HUnresolvedStaticFieldSet* instruction) {
4457 FieldAccessCallingConventionX86_64 calling_convention;
4458 codegen_->GenerateUnresolvedFieldAccess(instruction,
4459 instruction->GetFieldType(),
4460 instruction->GetFieldIndex(),
4461 instruction->GetDexPc(),
4462 calling_convention);
4463}
4464
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004465void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004466 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4467 ? LocationSummary::kCallOnSlowPath
4468 : LocationSummary::kNoCall;
4469 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4470 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004471 ? Location::RequiresRegister()
4472 : Location::Any();
4473 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004474 if (instruction->HasUses()) {
4475 locations->SetOut(Location::SameAsFirstInput());
4476 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004477}
4478
Calin Juravle2ae48182016-03-16 14:05:09 +00004479void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4480 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004481 return;
4482 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004483 LocationSummary* locations = instruction->GetLocations();
4484 Location obj = locations->InAt(0);
4485
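  // Implicit null check: read one word from the object's address. If `obj` is null, the load
  // faults and the runtime's fault handler turns the fault into a NullPointerException using
  // the PC recorded below. The choice of RAX is arbitrary; only the memory access matters.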
4486 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004487 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004488}
4489
Calin Juravle2ae48182016-03-16 14:05:09 +00004490void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004491 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004492 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004493
4494 LocationSummary* locations = instruction->GetLocations();
4495 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004496
4497 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004498 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004499 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004500 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004501 } else {
4502 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004503 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004504 __ jmp(slow_path->GetEntryLabel());
4505 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004506 }
4507 __ j(kEqual, slow_path->GetEntryLabel());
4508}
4509
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004510void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004511 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004512}
4513
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004514void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004515 bool object_array_get_with_read_barrier =
4516 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004517 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004518 new (GetGraph()->GetArena()) LocationSummary(instruction,
4519 object_array_get_with_read_barrier ?
4520 LocationSummary::kCallOnSlowPath :
4521 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004522 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004523 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004524 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4525 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4526 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004527 // The output overlaps for an object array get when read barriers
4528 // are enabled: we do not want the move to overwrite the array's
4529 // location, as we need it to emit the read barrier.
4530 locations->SetOut(
4531 Location::RequiresRegister(),
4532 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004533 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004534 // We need a temporary register for the read barrier marking slow
4535 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4536 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4537 locations->AddTemp(Location::RequiresRegister());
4538 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004539}
4540
4541void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4542 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004543 Location obj_loc = locations->InAt(0);
4544 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004545 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004546 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004547 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004548
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004549 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004550 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004551 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004552 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004553 if (index.IsConstant()) {
4554 __ movzxb(out, Address(obj,
4555 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4556 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004557 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004558 }
4559 break;
4560 }
4561
4562 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004563 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004564 if (index.IsConstant()) {
4565 __ movsxb(out, Address(obj,
4566 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4567 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004568 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004569 }
4570 break;
4571 }
4572
4573 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004574 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004575 if (index.IsConstant()) {
4576 __ movsxw(out, Address(obj,
4577 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4578 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004579 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004580 }
4581 break;
4582 }
4583
4584 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004585 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004586 if (index.IsConstant()) {
4587 __ movzxw(out, Address(obj,
4588 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4589 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004590 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004591 }
4592 break;
4593 }
4594
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004595 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004596 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004597 if (index.IsConstant()) {
4598 __ movl(out, Address(obj,
4599 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4600 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004601 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004602 }
4603 break;
4604 }
4605
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004606 case Primitive::kPrimNot: {
4607 static_assert(
4608 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4609 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004610 // /* HeapReference<Object> */ out =
4611 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4612 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4613 Location temp = locations->GetTemp(0);
4614 // Note that a potential implicit null check is handled in this
4615      // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4616 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4617 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4618 } else {
4619 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4620 if (index.IsConstant()) {
4621 uint32_t offset =
4622 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4623 __ movl(out, Address(obj, offset));
4624 codegen_->MaybeRecordImplicitNullCheck(instruction);
4625 // If read barriers are enabled, emit read barriers other than
4626 // Baker's using a slow path (and also unpoison the loaded
4627 // reference, if heap poisoning is enabled).
4628 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4629 } else {
4630 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4631 codegen_->MaybeRecordImplicitNullCheck(instruction);
4632 // If read barriers are enabled, emit read barriers other than
4633 // Baker's using a slow path (and also unpoison the loaded
4634 // reference, if heap poisoning is enabled).
4635 codegen_->MaybeGenerateReadBarrierSlow(
4636 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4637 }
4638 }
4639 break;
4640 }
4641
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004642 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004643 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004644 if (index.IsConstant()) {
4645 __ movq(out, Address(obj,
4646 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4647 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004648 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004649 }
4650 break;
4651 }
4652
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004653 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004654 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004655 if (index.IsConstant()) {
4656 __ movss(out, Address(obj,
4657 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4658 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004659 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004660 }
4661 break;
4662 }
4663
4664 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004665 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004666 if (index.IsConstant()) {
4667 __ movsd(out, Address(obj,
4668 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4669 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004670 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004671 }
4672 break;
4673 }
4674
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004675 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004676 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004677 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004678 }
Roland Levillain4d027112015-07-01 15:41:14 +01004679
4680 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004681 // Potential implicit null checks, in the case of reference
4682 // arrays, are handled in the previous switch statement.
4683 } else {
4684 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004685 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004686}
4687
4688void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004689 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004690
4691 bool needs_write_barrier =
4692 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004693 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004694 bool object_array_set_with_read_barrier =
4695 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004696
Nicolas Geoffray39468442014-09-02 15:17:15 +01004697 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004698 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004699 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004700 LocationSummary::kCallOnSlowPath :
4701 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004702
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004703 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004704 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4705 if (Primitive::IsFloatingPointType(value_type)) {
4706 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004707 } else {
4708 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4709 }
4710
4711 if (needs_write_barrier) {
4712 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004713
4714 // This first temporary register is possibly used for heap
4715 // reference poisoning and/or read barrier emission too.
4716 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004717 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004718 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004719}
4720
4721void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4722 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004723 Location array_loc = locations->InAt(0);
4724 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004725 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004726 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004727 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004728 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004729 bool needs_write_barrier =
4730 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004731 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4732 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4733 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004734
4735 switch (value_type) {
4736 case Primitive::kPrimBoolean:
4737 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004738 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4739 Address address = index.IsConstant()
4740 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4741 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4742 if (value.IsRegister()) {
4743 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004744 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004745 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004746 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004747 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004748 break;
4749 }
4750
4751 case Primitive::kPrimShort:
4752 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004753 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4754 Address address = index.IsConstant()
4755 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4756 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4757 if (value.IsRegister()) {
4758 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004759 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004760 DCHECK(value.IsConstant()) << value;
4761 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004762 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004763 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004764 break;
4765 }
4766
4767 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004768 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4769 Address address = index.IsConstant()
4770 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4771 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004772
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004773 if (!value.IsRegister()) {
4774 // Just setting null.
4775 DCHECK(instruction->InputAt(2)->IsNullConstant());
4776 DCHECK(value.IsConstant()) << value;
4777 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004778 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004779 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004780 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004781 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004782 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004783
4784 DCHECK(needs_write_barrier);
4785 CpuRegister register_value = value.AsRegister<CpuRegister>();
4786 NearLabel done, not_null, do_put;
4787 SlowPathCode* slow_path = nullptr;
4788 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004789 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004790 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4791 codegen_->AddSlowPath(slow_path);
4792 if (instruction->GetValueCanBeNull()) {
4793 __ testl(register_value, register_value);
4794 __ j(kNotEqual, &not_null);
4795 __ movl(address, Immediate(0));
4796 codegen_->MaybeRecordImplicitNullCheck(instruction);
4797 __ jmp(&done);
4798 __ Bind(&not_null);
4799 }
4800
Roland Levillain0d5a2812015-11-13 10:07:31 +00004801 if (kEmitCompilerReadBarrier) {
4802 // When read barriers are enabled, the type checking
4803 // instrumentation requires two read barriers:
4804 //
4805 // __ movl(temp2, temp);
4806 // // /* HeapReference<Class> */ temp = temp->component_type_
4807 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004808 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004809 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4810 //
4811 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4812 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004813 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004814 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4815 //
4816 // __ cmpl(temp, temp2);
4817 //
4818 // However, the second read barrier may trash `temp`, as it
4819 // is a temporary register, and as such would not be saved
4820 // along with live registers before calling the runtime (nor
4821 // restored afterwards). So in this case, we bail out and
4822 // delegate the work to the array set slow path.
4823 //
4824 // TODO: Extend the register allocator to support a new
4825 // "(locally) live temp" location so as to avoid always
4826 // going into the slow path when read barriers are enabled.
4827 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004828 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004829 // /* HeapReference<Class> */ temp = array->klass_
4830 __ movl(temp, Address(array, class_offset));
4831 codegen_->MaybeRecordImplicitNullCheck(instruction);
4832 __ MaybeUnpoisonHeapReference(temp);
4833
4834 // /* HeapReference<Class> */ temp = temp->component_type_
4835 __ movl(temp, Address(temp, component_offset));
4836 // If heap poisoning is enabled, no need to unpoison `temp`
4837 // nor the object reference in `register_value->klass`, as
4838 // we are comparing two poisoned references.
4839 __ cmpl(temp, Address(register_value, class_offset));
4840
4841 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4842 __ j(kEqual, &do_put);
4843 // If heap poisoning is enabled, the `temp` reference has
4844 // not been unpoisoned yet; unpoison it now.
4845 __ MaybeUnpoisonHeapReference(temp);
4846
4847 // /* HeapReference<Class> */ temp = temp->super_class_
4848 __ movl(temp, Address(temp, super_offset));
4849 // If heap poisoning is enabled, no need to unpoison
4850 // `temp`, as we are comparing against null below.
4851 __ testl(temp, temp);
4852 __ j(kNotEqual, slow_path->GetEntryLabel());
4853 __ Bind(&do_put);
4854 } else {
4855 __ j(kNotEqual, slow_path->GetEntryLabel());
4856 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004857 }
4858 }
4859
4860 if (kPoisonHeapReferences) {
4861 __ movl(temp, register_value);
4862 __ PoisonHeapReference(temp);
4863 __ movl(address, temp);
4864 } else {
4865 __ movl(address, register_value);
4866 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004867 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004868 codegen_->MaybeRecordImplicitNullCheck(instruction);
4869 }
4870
4871 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4872 codegen_->MarkGCCard(
4873 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4874 __ Bind(&done);
4875
4876 if (slow_path != nullptr) {
4877 __ Bind(slow_path->GetExitLabel());
4878 }
4879
4880 break;
4881 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004882
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004883 case Primitive::kPrimInt: {
4884 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4885 Address address = index.IsConstant()
4886 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4887 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4888 if (value.IsRegister()) {
4889 __ movl(address, value.AsRegister<CpuRegister>());
4890 } else {
4891 DCHECK(value.IsConstant()) << value;
4892 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4893 __ movl(address, Immediate(v));
4894 }
4895 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004896 break;
4897 }
4898
4899 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004900 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4901 Address address = index.IsConstant()
4902 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4903 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4904 if (value.IsRegister()) {
4905 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004906 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004907 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004908 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004909 Address address_high = index.IsConstant()
4910 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4911 offset + sizeof(int32_t))
4912 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4913 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004914 }
4915 break;
4916 }
4917
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004918 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004919 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4920 Address address = index.IsConstant()
4921 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4922 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004923 if (value.IsFpuRegister()) {
4924 __ movss(address, value.AsFpuRegister<XmmRegister>());
4925 } else {
4926 DCHECK(value.IsConstant());
4927 int32_t v =
4928 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4929 __ movl(address, Immediate(v));
4930 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004931 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004932 break;
4933 }
4934
4935 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004936 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4937 Address address = index.IsConstant()
4938 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4939 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004940 if (value.IsFpuRegister()) {
4941 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4942 codegen_->MaybeRecordImplicitNullCheck(instruction);
4943 } else {
4944 int64_t v =
4945 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4946 Address address_high = index.IsConstant()
4947 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4948 offset + sizeof(int32_t))
4949 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4950 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4951 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004952 break;
4953 }
4954
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004955 case Primitive::kPrimVoid:
4956 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004957 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004958 }
4959}
4960
4961void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004962 LocationSummary* locations =
4963 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004964 locations->SetInAt(0, Location::RequiresRegister());
4965 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004966}
4967
4968void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
4969 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01004970 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00004971 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
4972 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004973 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004974 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004975}
4976
4977void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004978 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4979 ? LocationSummary::kCallOnSlowPath
4980 : LocationSummary::kNoCall;
4981 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05004982 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04004983 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004984 if (instruction->HasUses()) {
4985 locations->SetOut(Location::SameAsFirstInput());
4986 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004987}
4988
4989void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
4990 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05004991 Location index_loc = locations->InAt(0);
4992 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07004993 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004994 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004995
Mark Mendell99dbd682015-04-22 16:18:52 -04004996 if (length_loc.IsConstant()) {
4997 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
4998 if (index_loc.IsConstant()) {
4999      // BCE will remove the bounds check if we are guaranteed to pass.
5000 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5001 if (index < 0 || index >= length) {
5002 codegen_->AddSlowPath(slow_path);
5003 __ jmp(slow_path->GetEntryLabel());
5004 } else {
5005 // Some optimization after BCE may have generated this, and we should not
5006 // generate a bounds check if it is a valid range.
5007 }
5008 return;
5009 }
5010
5011 // We have to reverse the jump condition because the length is the constant.
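    // kAboveEqual is an unsigned comparison, so a negative index (which wraps around to a
    // large unsigned value) is also sent to the slow path.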
5012 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5013 __ cmpl(index_reg, Immediate(length));
5014 codegen_->AddSlowPath(slow_path);
5015 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005016 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04005017 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5018 if (index_loc.IsConstant()) {
5019 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5020 __ cmpl(length, Immediate(value));
5021 } else {
5022 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5023 }
5024 codegen_->AddSlowPath(slow_path);
5025 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005026 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005027}
5028
5029void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5030 CpuRegister card,
5031 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005032 CpuRegister value,
5033 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005034 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005035 if (value_can_be_null) {
5036 __ testl(value, value);
5037 __ j(kEqual, &is_null);
5038 }
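  // Dirty the card covering `object`: load the biased card table base from the Thread, index
  // it with the object address shifted right by kCardShift, and store the low byte of that
  // base register (the runtime biases the table so this byte equals the dirty-card value).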
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005039 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5040 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005041 __ movq(temp, object);
5042 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005043 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005044 if (value_can_be_null) {
5045 __ Bind(&is_null);
5046 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005047}
5048
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005049void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005050 LOG(FATAL) << "Unimplemented";
5051}
5052
5053void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005054 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5055}
5056
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005057void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5058 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5059}
5060
5061void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005062 HBasicBlock* block = instruction->GetBlock();
5063 if (block->GetLoopInformation() != nullptr) {
5064 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5065 // The back edge will generate the suspend check.
5066 return;
5067 }
5068 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5069 // The goto will generate the suspend check.
5070 return;
5071 }
5072 GenerateSuspendCheck(instruction, nullptr);
5073}
5074
5075void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5076 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005077 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005078 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5079 if (slow_path == nullptr) {
5080 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5081 instruction->SetSlowPath(slow_path);
5082 codegen_->AddSlowPath(slow_path);
5083 if (successor != nullptr) {
5084 DCHECK(successor->IsLoopHeader());
5085 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5086 }
5087 } else {
5088 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5089 }
5090
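  // Test the thread's flag word: a non-zero value means a suspend or checkpoint request is
  // pending, in which case the slow path calls into the runtime.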
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005091 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5092 /* no_rip */ true),
5093 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005094 if (successor == nullptr) {
5095 __ j(kNotEqual, slow_path->GetEntryLabel());
5096 __ Bind(slow_path->GetReturnLabel());
5097 } else {
5098 __ j(kEqual, codegen_->GetLabelOf(successor));
5099 __ jmp(slow_path->GetEntryLabel());
5100 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005101}
5102
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005103X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5104 return codegen_->GetAssembler();
5105}
5106
5107void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005108 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005109 Location source = move->GetSource();
5110 Location destination = move->GetDestination();
5111
5112 if (source.IsRegister()) {
5113 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005114 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005115 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005116 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005117 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005118 } else {
5119 DCHECK(destination.IsDoubleStackSlot());
5120 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005121 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005122 }
5123 } else if (source.IsStackSlot()) {
5124 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005125 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005126 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005127 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005128 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005129 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005130 } else {
5131 DCHECK(destination.IsStackSlot());
5132 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5133 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5134 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005135 } else if (source.IsDoubleStackSlot()) {
5136 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005137 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005138 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005139 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005140 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5141 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005142 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005143 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005144 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5145 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5146 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005147 } else if (source.IsConstant()) {
5148 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005149 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5150 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005151 if (destination.IsRegister()) {
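      // A zero constant is materialized with xorl rather than a movl of zero: the encoding is
      // shorter and it breaks any dependency on the register's previous value.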
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005152 if (value == 0) {
5153 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5154 } else {
5155 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5156 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005157 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005158 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005159 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005160 }
5161 } else if (constant->IsLongConstant()) {
5162 int64_t value = constant->AsLongConstant()->GetValue();
5163 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005164 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005165 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005166 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005167 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005168 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005169 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005170 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005171 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005172 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005173 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005174 } else {
5175 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005176 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005177 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5178 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005179 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005180 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005181 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005182 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005183 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005184 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005185 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005186 } else {
5187 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005188 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005189 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005190 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005191 } else if (source.IsFpuRegister()) {
5192 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005193 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005194 } else if (destination.IsStackSlot()) {
5195 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005196 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005197 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005198 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005199 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005200 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005201 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005202 }
5203}
5204
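// Swaps a 32-bit register with a 32-bit stack slot, using TMP as scratch: the slot is loaded
// into TMP, the register is stored to the slot, and TMP is copied back into the register.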
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005205void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005206 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005207 __ movl(Address(CpuRegister(RSP), mem), reg);
5208 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005209}
5210
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005211void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005212 ScratchRegisterScope ensure_scratch(
5213 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5214
5215 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5216 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5217 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5218 Address(CpuRegister(RSP), mem2 + stack_offset));
5219 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5220 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5221 CpuRegister(ensure_scratch.GetRegister()));
5222}
5223
Mark Mendell8a1c7282015-06-29 15:41:28 -04005224void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5225 __ movq(CpuRegister(TMP), reg1);
5226 __ movq(reg1, reg2);
5227 __ movq(reg2, CpuRegister(TMP));
5228}
5229
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005230void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5231 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5232 __ movq(Address(CpuRegister(RSP), mem), reg);
5233 __ movq(reg, CpuRegister(TMP));
5234}
5235
5236void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5237 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005238 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005239
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005240 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5241 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5242 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5243 Address(CpuRegister(RSP), mem2 + stack_offset));
5244 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5245 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5246 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005247}
5248
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005249void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5250 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5251 __ movss(Address(CpuRegister(RSP), mem), reg);
5252 __ movd(reg, CpuRegister(TMP));
5253}
5254
5255void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5256 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5257 __ movsd(Address(CpuRegister(RSP), mem), reg);
5258 __ movd(reg, CpuRegister(TMP));
5259}
5260
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005261void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005262 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005263 Location source = move->GetSource();
5264 Location destination = move->GetDestination();
5265
5266 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005267 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005268 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005269 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005270 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005271 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005272 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005273 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5274 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005275 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005276 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005277 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005278 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5279 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005280 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005281 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5282 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5283 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005284 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005285 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005286 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005287 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005288 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005289 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005290 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005291 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005292 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005293 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005294 }
5295}
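// EmitSwap dispatches purely on the (source, destination) location kinds:
// integer register pairs go through Exchange64(CpuRegister, CpuRegister),
// register/stack and stack/stack cases reuse the Exchange32/Exchange64
// helpers above (the stack/stack variants additionally reserve a scratch
// register, spilling one around the swap if none is free), and XMM<->XMM
// swaps bounce one operand through TMP since there is no xchg for SSE
// registers. Any combination not handled here is rejected with LOG(FATAL),
// so new location kinds must be added to this switch explicitly.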
5296
5297
5298void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5299 __ pushq(CpuRegister(reg));
5300}
5301
5302
5303void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5304 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005305}
5306
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005307void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005308 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005309 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5310 Immediate(mirror::Class::kStatusInitialized));
5311 __ j(kLess, slow_path->GetEntryLabel());
5312 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005313 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005314}
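// A minimal sketch of the check above, assuming `class_reg` holds the class
// whose initialization status is being tested:
//
//   cmpl [class_reg + mirror::Class::StatusOffset()], kStatusInitialized
//   jl   <class initialization slow path>    // status < initialized
//   <slow path exit label>
//
// No fence is needed after the status load: x86-64 does not reorder loads
// with other loads, so static fields written by <clinit> are visible once
// the status is observed as initialized.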
5315
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005316void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01005317 InvokeRuntimeCallingConvention calling_convention;
5318 CodeGenerator::CreateLoadClassLocationSummary(
5319 cls,
5320 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Roland Levillain0d5a2812015-11-13 10:07:31 +00005321 Location::RegisterLocation(RAX),
5322 /* code_generator_supports_read_barrier */ true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005323}
5324
5325void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005326 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005327 if (cls->NeedsAccessCheck()) {
5328 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5329 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5330 cls,
5331 cls->GetDexPc(),
5332 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005333 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005334 return;
5335 }
5336
Roland Levillain0d5a2812015-11-13 10:07:31 +00005337 Location out_loc = locations->Out();
5338 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Calin Juravle580b6092015-10-06 17:35:58 +01005339 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005340
Calin Juravle580b6092015-10-06 17:35:58 +01005341 if (cls->IsReferrersClass()) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005342 DCHECK(!cls->CanCallRuntime());
5343 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005344 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5345 GenerateGcRootFieldLoad(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005346 cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005347 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005348 // /* GcRoot<mirror::Class>[] */ out =
5349 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5350 __ movq(out, Address(current_method,
5351 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005352 // /* GcRoot<mirror::Class> */ out = out[type_index]
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005353 GenerateGcRootFieldLoad(
5354 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Roland Levillain4d027112015-07-01 15:41:14 +01005355
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005356 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
5357 DCHECK(cls->CanCallRuntime());
5358 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5359 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5360 codegen_->AddSlowPath(slow_path);
5361 if (!cls->IsInDexCache()) {
5362 __ testl(out, out);
5363 __ j(kEqual, slow_path->GetEntryLabel());
5364 }
5365 if (cls->MustGenerateClinitCheck()) {
5366 GenerateClassInitializationCheck(slow_path, out);
5367 } else {
5368 __ Bind(slow_path->GetExitLabel());
5369 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005370 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005371 }
5372}
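// For reference, the non-referrer path above reduces to roughly the
// following (GC-root loads shown in their plain, non-read-barrier form):
//
//   movq  out, [current_method + DexCacheResolvedTypesOffset]   // type array
//   movl  out, [out + type_index * sizeof(GcRoot)]              // resolved class
//   testl out, out                                              // null => unresolved
//   jz    <LoadClassSlowPathX86_64>
//   // plus GenerateClassInitializationCheck when a clinit check is required.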
5373
5374void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5375 LocationSummary* locations =
5376 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5377 locations->SetInAt(0, Location::RequiresRegister());
5378 if (check->HasUses()) {
5379 locations->SetOut(Location::SameAsFirstInput());
5380 }
5381}
5382
5383void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005384 // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005385 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005386 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005387 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005388 GenerateClassInitializationCheck(slow_path,
5389 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005390}
5391
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005392HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5393 HLoadString::LoadKind desired_string_load_kind) {
5394 if (kEmitCompilerReadBarrier) {
5395 switch (desired_string_load_kind) {
5396 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5397 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5398 case HLoadString::LoadKind::kBootImageAddress:
5399 // TODO: Implement for read barrier.
5400 return HLoadString::LoadKind::kDexCacheViaMethod;
5401 default:
5402 break;
5403 }
5404 }
5405 switch (desired_string_load_kind) {
5406 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5407 DCHECK(!GetCompilerOptions().GetCompilePic());
5408 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5409 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5410 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5411 DCHECK(GetCompilerOptions().GetCompilePic());
5412 break;
5413 case HLoadString::LoadKind::kBootImageAddress:
5414 break;
5415 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005416 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005417 break;
5418 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005419 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005420 break;
5421 case HLoadString::LoadKind::kDexCacheViaMethod:
5422 break;
5423 }
5424 return desired_string_load_kind;
5425}
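// Summary of the policy above (a sketch, not an exhaustive contract):
//  - With read barriers enabled, the boot-image kinds are not implemented yet
//    and degrade to kDexCacheViaMethod.
//  - kBootImageLinkTimeAddress is upgraded to kBootImageLinkTimePcRelative,
//    since RIP-relative addressing is always available on x86-64.
//  - kDexCacheAddress is only valid when JIT compiling (the dex cache address
//    is known at compile time), while kDexCachePcRelative is AOT-only.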
5426
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005427void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005428 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005429 ? LocationSummary::kCallOnSlowPath
5430 : LocationSummary::kNoCall;
5431 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005432 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5433 locations->SetInAt(0, Location::RequiresRegister());
5434 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005435 locations->SetOut(Location::RequiresRegister());
5436}
5437
5438void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005439 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005440 Location out_loc = locations->Out();
5441 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005442
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005443 switch (load->GetLoadKind()) {
5444 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
5445 DCHECK(!kEmitCompilerReadBarrier);
5446 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5447 codegen_->RecordStringPatch(load);
5448 return; // No dex cache slow path.
5449 }
5450 case HLoadString::LoadKind::kBootImageAddress: {
5451 DCHECK(!kEmitCompilerReadBarrier);
5452 DCHECK_NE(load->GetAddress(), 0u);
5453 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5454 __ movl(out, Immediate(address)); // Zero-extended.
5455 codegen_->RecordSimplePatch();
5456 return; // No dex cache slow path.
5457 }
5458 case HLoadString::LoadKind::kDexCacheAddress: {
5459 DCHECK_NE(load->GetAddress(), 0u);
5460 if (IsUint<32>(load->GetAddress())) {
5461 Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
5462 GenerateGcRootFieldLoad(load, out_loc, address);
5463 } else {
5464 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5465 __ movq(out, Immediate(load->GetAddress()));
5466 GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
5467 }
5468 break;
5469 }
5470 case HLoadString::LoadKind::kDexCachePcRelative: {
5471 uint32_t offset = load->GetDexCacheElementOffset();
5472 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
5473 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5474 /* no_rip */ false);
5475 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
5476 break;
5477 }
5478 case HLoadString::LoadKind::kDexCacheViaMethod: {
5479 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5480
5481 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5482 GenerateGcRootFieldLoad(
5483 load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5484 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5485 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
5486 // /* GcRoot<mirror::String> */ out = out[string_index]
5487 GenerateGcRootFieldLoad(
5488 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
5489 break;
5490 }
5491 default:
5492 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
5493 UNREACHABLE();
5494 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005495
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005496 if (!load->IsInDexCache()) {
5497 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5498 codegen_->AddSlowPath(slow_path);
5499 __ testl(out, out);
5500 __ j(kEqual, slow_path->GetEntryLabel());
5501 __ Bind(slow_path->GetExitLabel());
5502 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005503}
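// As an illustration, the two boot-image kinds above compile down to a single
// instruction plus a patch record and never need the null-check slow path:
//
//   leal out, [rip + <patched offset>]   // kBootImageLinkTimePcRelative,
//                                        // recorded via RecordStringPatch().
//   movl out, imm32                      // kBootImageAddress, zero-extended,
//                                        // recorded via RecordSimplePatch().
//
// Only the dex-cache kinds can observe a missing string and fall back to
// LoadStringSlowPathX86_64 through the final null check.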
5504
David Brazdilcb1c0552015-08-04 16:22:25 +01005505static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005506 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5507 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005508}
5509
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005510void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5511 LocationSummary* locations =
5512 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5513 locations->SetOut(Location::RequiresRegister());
5514}
5515
5516void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005517 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5518}
5519
5520void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5521 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5522}
5523
5524void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5525 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005526}
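// The pending exception lives in the Thread object, which x86-64 addresses
// through the GS segment, so both accesses above are single gs-prefixed
// moves; roughly:
//
//   gs: movl out, [Thread::ExceptionOffset]    // VisitLoadException
//   gs: movl [Thread::ExceptionOffset], 0      // VisitClearException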
5527
5528void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5529 LocationSummary* locations =
5530 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5531 InvokeRuntimeCallingConvention calling_convention;
5532 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5533}
5534
5535void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005536 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5537 instruction,
5538 instruction->GetDexPc(),
5539 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005540 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005541}
5542
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005543static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5544 return kEmitCompilerReadBarrier &&
5545 (kUseBakerReadBarrier ||
5546 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5547 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5548 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5549}
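// The extra temporary is only needed when the reference loads emitted for the
// type check require scratch state of their own: with Baker read barriers the
// lock word is loaded into a temp, and with slow-path read barriers the value
// being overwritten must first be saved so the barrier can recompute the
// original reference (see GenerateReferenceLoadOneRegister below).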
5550
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005551void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005552 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005553 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5554 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005555 case TypeCheckKind::kExactCheck:
5556 case TypeCheckKind::kAbstractClassCheck:
5557 case TypeCheckKind::kClassHierarchyCheck:
5558 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005559 call_kind =
5560 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005561 break;
5562 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005563 case TypeCheckKind::kUnresolvedCheck:
5564 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005565 call_kind = LocationSummary::kCallOnSlowPath;
5566 break;
5567 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005568
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005569 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005570 locations->SetInAt(0, Location::RequiresRegister());
5571 locations->SetInAt(1, Location::Any());
5572 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5573 locations->SetOut(Location::RequiresRegister());
5574 // When read barriers are enabled, we need a temporary register for
5575 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005576 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005577 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005578 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005579}
5580
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005581void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005582 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005583 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005584 Location obj_loc = locations->InAt(0);
5585 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005586 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005587 Location out_loc = locations->Out();
5588 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005589 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005590 locations->GetTemp(0) :
5591 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005592 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005593 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5594 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5595 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005596 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005597 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005598
5599 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005600 // Avoid null check if we know obj is not null.
5601 if (instruction->MustDoNullCheck()) {
5602 __ testl(obj, obj);
5603 __ j(kEqual, &zero);
5604 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005605
Roland Levillain0d5a2812015-11-13 10:07:31 +00005606 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005607 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005608
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005609 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005610 case TypeCheckKind::kExactCheck: {
5611 if (cls.IsRegister()) {
5612 __ cmpl(out, cls.AsRegister<CpuRegister>());
5613 } else {
5614 DCHECK(cls.IsStackSlot()) << cls;
5615 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5616 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005617 if (zero.IsLinked()) {
5618 // Classes must be equal for the instanceof to succeed.
5619 __ j(kNotEqual, &zero);
5620 __ movl(out, Immediate(1));
5621 __ jmp(&done);
5622 } else {
5623 __ setcc(kEqual, out);
5624 // setcc only sets the low byte.
5625 __ andl(out, Immediate(1));
5626 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005627 break;
5628 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005629
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005630 case TypeCheckKind::kAbstractClassCheck: {
5631 // If the class is abstract, we eagerly fetch the super class of the
5632 // object to avoid doing a comparison we know will fail.
5633 NearLabel loop, success;
5634 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005635 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005636 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005637 __ testl(out, out);
5638 // If `out` is null, we use it for the result, and jump to `done`.
5639 __ j(kEqual, &done);
5640 if (cls.IsRegister()) {
5641 __ cmpl(out, cls.AsRegister<CpuRegister>());
5642 } else {
5643 DCHECK(cls.IsStackSlot()) << cls;
5644 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5645 }
5646 __ j(kNotEqual, &loop);
5647 __ movl(out, Immediate(1));
5648 if (zero.IsLinked()) {
5649 __ jmp(&done);
5650 }
5651 break;
5652 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005653
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005654 case TypeCheckKind::kClassHierarchyCheck: {
5655 // Walk over the class hierarchy to find a match.
5656 NearLabel loop, success;
5657 __ Bind(&loop);
5658 if (cls.IsRegister()) {
5659 __ cmpl(out, cls.AsRegister<CpuRegister>());
5660 } else {
5661 DCHECK(cls.IsStackSlot()) << cls;
5662 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5663 }
5664 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005665 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005666 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005667 __ testl(out, out);
5668 __ j(kNotEqual, &loop);
5669 // If `out` is null, we use it for the result, and jump to `done`.
5670 __ jmp(&done);
5671 __ Bind(&success);
5672 __ movl(out, Immediate(1));
5673 if (zero.IsLinked()) {
5674 __ jmp(&done);
5675 }
5676 break;
5677 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005678
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005679 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005680 // Do an exact check.
5681 NearLabel exact_check;
5682 if (cls.IsRegister()) {
5683 __ cmpl(out, cls.AsRegister<CpuRegister>());
5684 } else {
5685 DCHECK(cls.IsStackSlot()) << cls;
5686 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5687 }
5688 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005689 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005690 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005691 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005692 __ testl(out, out);
5693 // If `out` is null, we use it for the result, and jump to `done`.
5694 __ j(kEqual, &done);
5695 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5696 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005697 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005698 __ movl(out, Immediate(1));
5699 __ jmp(&done);
5700 break;
5701 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005702
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005703 case TypeCheckKind::kArrayCheck: {
5704 if (cls.IsRegister()) {
5705 __ cmpl(out, cls.AsRegister<CpuRegister>());
5706 } else {
5707 DCHECK(cls.IsStackSlot()) << cls;
5708 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5709 }
5710 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005711 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5712 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005713 codegen_->AddSlowPath(slow_path);
5714 __ j(kNotEqual, slow_path->GetEntryLabel());
5715 __ movl(out, Immediate(1));
5716 if (zero.IsLinked()) {
5717 __ jmp(&done);
5718 }
5719 break;
5720 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005721
Calin Juravle98893e12015-10-02 21:05:03 +01005722 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005723 case TypeCheckKind::kInterfaceCheck: {
5724 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005725 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005726 // cases.
5727 //
5728 // We cannot directly call the InstanceofNonTrivial runtime
5729 // entry point without resorting to a type checking slow path
5730 // here (i.e. by calling InvokeRuntime directly), as it would
5731 // require to assign fixed registers for the inputs of this
5732 // HInstanceOf instruction (following the runtime calling
5733 // convention), which might be cluttered by the potential first
5734 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005735 //
5736 // TODO: Introduce a new runtime entry point taking the object
5737 // to test (instead of its class) as argument, and let it deal
5738 // with the read barrier issues. This will let us refactor this
5739 // case of the `switch` code as it was previously (with a direct
5740 // call to the runtime not using a type checking slow path).
5741 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005742 DCHECK(locations->OnlyCallsOnSlowPath());
5743 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5744 /* is_fatal */ false);
5745 codegen_->AddSlowPath(slow_path);
5746 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005747 if (zero.IsLinked()) {
5748 __ jmp(&done);
5749 }
5750 break;
5751 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005752 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005753
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005754 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005755 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005756 __ xorl(out, out);
5757 }
5758
5759 if (done.IsLinked()) {
5760 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005761 }
5762
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005763 if (slow_path != nullptr) {
5764 __ Bind(slow_path->GetExitLabel());
5765 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005766}
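// Overall shape of the instanceof code above, sketched for the common
// kExactCheck case with the read-barrier expansions omitted:
//
//   testl obj, obj                  // only when a null check is required
//   jz    zero                      // null is never an instance
//   movl  out, [obj + class_offset]
//   cmpl  out, cls                  // or a stack-slot operand
//   setcc kEqual, out               // or a branch sequence when `zero` is linked
//   andl  out, 1
//
// The abstract/hierarchy/array variants loop over super_class_ or
// component_type_, and only kArrayCheck, kUnresolvedCheck and
// kInterfaceCheck ever reach TypeCheckSlowPathX86_64.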
5767
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005768void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005769 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5770 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005771 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5772 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005773 case TypeCheckKind::kExactCheck:
5774 case TypeCheckKind::kAbstractClassCheck:
5775 case TypeCheckKind::kClassHierarchyCheck:
5776 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005777 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5778 LocationSummary::kCallOnSlowPath :
5779 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005780 break;
5781 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005782 case TypeCheckKind::kUnresolvedCheck:
5783 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005784 call_kind = LocationSummary::kCallOnSlowPath;
5785 break;
5786 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005787 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5788 locations->SetInAt(0, Location::RequiresRegister());
5789 locations->SetInAt(1, Location::Any());
5790 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5791 locations->AddTemp(Location::RequiresRegister());
5792 // When read barriers are enabled, we need an additional temporary
5793 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005794 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005795 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005796 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005797}
5798
5799void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005800 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005801 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005802 Location obj_loc = locations->InAt(0);
5803 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005804 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005805 Location temp_loc = locations->GetTemp(0);
5806 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005807 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005808 locations->GetTemp(1) :
5809 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005810 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5811 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5812 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5813 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005814
Roland Levillain0d5a2812015-11-13 10:07:31 +00005815 bool is_type_check_slow_path_fatal =
5816 (type_check_kind == TypeCheckKind::kExactCheck ||
5817 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5818 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5819 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5820 !instruction->CanThrowIntoCatchBlock();
5821 SlowPathCode* type_check_slow_path =
5822 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5823 is_type_check_slow_path_fatal);
5824 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005825
Roland Levillain0d5a2812015-11-13 10:07:31 +00005826 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005827 case TypeCheckKind::kExactCheck:
5828 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005829 NearLabel done;
5830 // Avoid null check if we know obj is not null.
5831 if (instruction->MustDoNullCheck()) {
5832 __ testl(obj, obj);
5833 __ j(kEqual, &done);
5834 }
5835
5836 // /* HeapReference<Class> */ temp = obj->klass_
5837 GenerateReferenceLoadTwoRegisters(
5838 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5839
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005840 if (cls.IsRegister()) {
5841 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5842 } else {
5843 DCHECK(cls.IsStackSlot()) << cls;
5844 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5845 }
5846 // Jump to slow path for throwing the exception or doing a
5847 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005848 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005849 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005850 break;
5851 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005852
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005853 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005854 NearLabel done;
5855 // Avoid null check if we know obj is not null.
5856 if (instruction->MustDoNullCheck()) {
5857 __ testl(obj, obj);
5858 __ j(kEqual, &done);
5859 }
5860
5861 // /* HeapReference<Class> */ temp = obj->klass_
5862 GenerateReferenceLoadTwoRegisters(
5863 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5864
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005865 // If the class is abstract, we eagerly fetch the super class of the
5866 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005867 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005868 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005869 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005870 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005871
5872 // If the class reference currently in `temp` is not null, jump
5873 // to the `compare_classes` label to compare it with the checked
5874 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005875 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005876 __ j(kNotEqual, &compare_classes);
5877 // Otherwise, jump to the slow path to throw the exception.
5878 //
5879 // But before, move back the object's class into `temp` before
5880 // going into the slow path, as it has been overwritten in the
5881 // meantime.
5882 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005883 GenerateReferenceLoadTwoRegisters(
5884 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005885 __ jmp(type_check_slow_path->GetEntryLabel());
5886
5887 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005888 if (cls.IsRegister()) {
5889 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5890 } else {
5891 DCHECK(cls.IsStackSlot()) << cls;
5892 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5893 }
5894 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00005895 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005896 break;
5897 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005898
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005899 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005900 NearLabel done;
5901 // Avoid null check if we know obj is not null.
5902 if (instruction->MustDoNullCheck()) {
5903 __ testl(obj, obj);
5904 __ j(kEqual, &done);
5905 }
5906
5907 // /* HeapReference<Class> */ temp = obj->klass_
5908 GenerateReferenceLoadTwoRegisters(
5909 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5910
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005911 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005912 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005913 __ Bind(&loop);
5914 if (cls.IsRegister()) {
5915 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5916 } else {
5917 DCHECK(cls.IsStackSlot()) << cls;
5918 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5919 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005920 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005921
Roland Levillain0d5a2812015-11-13 10:07:31 +00005922 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005923 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005924
5925 // If the class reference currently in `temp` is not null, jump
5926 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005927 __ testl(temp, temp);
5928 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005929 // Otherwise, jump to the slow path to throw the exception.
5930 //
5931 // But before, move back the object's class into `temp` before
5932 // going into the slow path, as it has been overwritten in the
5933 // meantime.
5934 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005935 GenerateReferenceLoadTwoRegisters(
5936 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005937 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005938 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005939 break;
5940 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005941
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005942 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005943 // We cannot use a NearLabel here, as its range might be too
5944 // short in some cases when read barriers are enabled. This has
5945 // been observed for instance when the code emitted for this
5946 // case uses high x86-64 registers (R8-R15).
5947 Label done;
5948 // Avoid null check if we know obj is not null.
5949 if (instruction->MustDoNullCheck()) {
5950 __ testl(obj, obj);
5951 __ j(kEqual, &done);
5952 }
5953
5954 // /* HeapReference<Class> */ temp = obj->klass_
5955 GenerateReferenceLoadTwoRegisters(
5956 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5957
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005958 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005959 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005960 if (cls.IsRegister()) {
5961 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5962 } else {
5963 DCHECK(cls.IsStackSlot()) << cls;
5964 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5965 }
5966 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005967
5968 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005969 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005970 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005971
5972 // If the component type is not null (i.e. the object is indeed
5973 // an array), jump to label `check_non_primitive_component_type`
5974 // to further check that this component type is not a primitive
5975 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005976 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005977 __ j(kNotEqual, &check_non_primitive_component_type);
5978 // Otherwise, jump to the slow path to throw the exception.
5979 //
5980 // But before, move back the object's class into `temp` before
5981 // going into the slow path, as it has been overwritten in the
5982 // meantime.
5983 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005984 GenerateReferenceLoadTwoRegisters(
5985 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005986 __ jmp(type_check_slow_path->GetEntryLabel());
5987
5988 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005989 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005990 __ j(kEqual, &done);
5991 // Same comment as above regarding `temp` and the slow path.
5992 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005993 GenerateReferenceLoadTwoRegisters(
5994 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005995 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005996 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005997 break;
5998 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005999
Calin Juravle98893e12015-10-02 21:05:03 +01006000 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006001 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00006002 NearLabel done;
6003 // Avoid null check if we know obj is not null.
6004 if (instruction->MustDoNullCheck()) {
6005 __ testl(obj, obj);
6006 __ j(kEqual, &done);
6007 }
6008
6009 // /* HeapReference<Class> */ temp = obj->klass_
6010 GenerateReferenceLoadTwoRegisters(
6011 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6012
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006013 // We always go into the type check slow path for the unresolved
6014 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006015 //
6016 // We cannot directly call the CheckCast runtime entry point
6017 // without resorting to a type checking slow path here (i.e. by
6018 // calling InvokeRuntime directly), as it would require to
6019 // assign fixed registers for the inputs of this HInstanceOf
6020 // instruction (following the runtime calling convention), which
6021 // might be cluttered by the potential first read barrier
6022 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006023 //
6024 // TODO: Introduce a new runtime entry point taking the object
6025 // to test (instead of its class) as argument, and let it deal
6026 // with the read barrier issues. This will let us refactor this
6027 // case of the `switch` code as it was previously (with a direct
6028 // call to the runtime not using a type checking slow path).
6029 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006030 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006031 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006032 break;
6033 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006034
Roland Levillain0d5a2812015-11-13 10:07:31 +00006035 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006036}
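// The check-cast cases mirror VisitInstanceOf, except that a failed
// comparison jumps into TypeCheckSlowPathX86_64 instead of producing a zero
// result; for kExactCheck this is roughly:
//
//   testl obj, obj                  // only when a null check is required
//   jz    done                      // null passes any cast
//   movl  temp, [obj + class_offset]
//   cmpl  temp, cls
//   jne   <type check slow path>    // throws, or re-dispatches for arrays
//   done:
//
// The slow path is treated as fatal (non-returning) only for the simple
// check kinds and only when the instruction cannot throw into a catch block.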
6037
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006038void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6039 LocationSummary* locations =
6040 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
6041 InvokeRuntimeCallingConvention calling_convention;
6042 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6043}
6044
6045void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01006046 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
6047 : QUICK_ENTRY_POINT(pUnlockObject),
6048 instruction,
6049 instruction->GetDexPc(),
6050 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006051 if (instruction->IsEnter()) {
6052 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6053 } else {
6054 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6055 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006056}
6057
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006058void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6059void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6060void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6061
6062void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6063 LocationSummary* locations =
6064 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6065 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6066 || instruction->GetResultType() == Primitive::kPrimLong);
6067 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006068 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006069 locations->SetOut(Location::SameAsFirstInput());
6070}
6071
6072void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6073 HandleBitwiseOperation(instruction);
6074}
6075
6076void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6077 HandleBitwiseOperation(instruction);
6078}
6079
6080void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6081 HandleBitwiseOperation(instruction);
6082}
6083
6084void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6085 LocationSummary* locations = instruction->GetLocations();
6086 Location first = locations->InAt(0);
6087 Location second = locations->InAt(1);
6088 DCHECK(first.Equals(locations->Out()));
6089
6090 if (instruction->GetResultType() == Primitive::kPrimInt) {
6091 if (second.IsRegister()) {
6092 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006093 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006094 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006095 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006096 } else {
6097 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006098 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006099 }
6100 } else if (second.IsConstant()) {
6101 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6102 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006103 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006104 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006105 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006106 } else {
6107 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006108 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006109 }
6110 } else {
6111 Address address(CpuRegister(RSP), second.GetStackIndex());
6112 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006113 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006114 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006115 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006116 } else {
6117 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006118 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006119 }
6120 }
6121 } else {
6122 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006123 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6124 bool second_is_constant = false;
6125 int64_t value = 0;
6126 if (second.IsConstant()) {
6127 second_is_constant = true;
6128 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006129 }
Mark Mendell40741f32015-04-20 22:10:34 -04006130 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006131
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006132 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006133 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006134 if (is_int32_value) {
6135 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6136 } else {
6137 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6138 }
6139 } else if (second.IsDoubleStackSlot()) {
6140 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006141 } else {
6142 __ andq(first_reg, second.AsRegister<CpuRegister>());
6143 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006144 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006145 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006146 if (is_int32_value) {
6147 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6148 } else {
6149 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6150 }
6151 } else if (second.IsDoubleStackSlot()) {
6152 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006153 } else {
6154 __ orq(first_reg, second.AsRegister<CpuRegister>());
6155 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006156 } else {
6157 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006158 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006159 if (is_int32_value) {
6160 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6161 } else {
6162 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6163 }
6164 } else if (second.IsDoubleStackSlot()) {
6165 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006166 } else {
6167 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6168 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006169 }
6170 }
6171}
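// Operand-selection summary for the bitwise operations above: the 32-bit
// forms take the second operand directly as a register, an immediate, or a
// stack slot, while the 64-bit forms only embed immediates that fit in 32
// bits (sign-extended by the hardware). Larger constants are materialized as
// RIP-relative literals, so a long AND is emitted as one of, roughly:
//
//   andq first_reg, imm32                   // IsInt<32>(value)
//   andq first_reg, [rip + literal64]       // constant needing 64 bits
//   andq first_reg, [RSP + stack_index]     // second operand spilled
//   andq first_reg, second_reg              // both operands in registers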
6172
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006173void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6174 Location out,
6175 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006176 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006177 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6178 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006179 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006180 if (kUseBakerReadBarrier) {
6181 // Load with fast path based Baker's read barrier.
6182 // /* HeapReference<Object> */ out = *(out + offset)
6183 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006184 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006185 } else {
6186 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006187 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006188 // in the following move operation, as we will need it for the
6189 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006190 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006191 // /* HeapReference<Object> */ out = *(out + offset)
6192 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006193 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006194 }
6195 } else {
6196 // Plain load with no read barrier.
6197 // /* HeapReference<Object> */ out = *(out + offset)
6198 __ movl(out_reg, Address(out_reg, offset));
6199 __ MaybeUnpoisonHeapReference(out_reg);
6200 }
6201}
6202
6203void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6204 Location out,
6205 Location obj,
6206 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006207 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006208 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6209 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6210 if (kEmitCompilerReadBarrier) {
6211 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006212 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006213 // Load with fast path based Baker's read barrier.
6214 // /* HeapReference<Object> */ out = *(obj + offset)
6215 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006216 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006217 } else {
6218 // Load with slow path based read barrier.
6219 // /* HeapReference<Object> */ out = *(obj + offset)
6220 __ movl(out_reg, Address(obj_reg, offset));
6221 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6222 }
6223 } else {
6224 // Plain load with no read barrier.
6225 // /* HeapReference<Object> */ out = *(obj + offset)
6226 __ movl(out_reg, Address(obj_reg, offset));
6227 __ MaybeUnpoisonHeapReference(out_reg);
6228 }
6229}
6230
6231void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6232 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006233 const Address& address,
6234 Label* fixup_label) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006235 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6236 if (kEmitCompilerReadBarrier) {
6237 if (kUseBakerReadBarrier) {
6238 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6239 // Baker's read barriers are used:
6240 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006241 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006242 // if (Thread::Current()->GetIsGcMarking()) {
6243 // root = ReadBarrier::Mark(root)
6244 // }
6245
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006246 // /* GcRoot<mirror::Object> */ root = *address
6247 __ movl(root_reg, address);
6248 if (fixup_label != nullptr) {
6249 __ Bind(fixup_label);
6250 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006251 static_assert(
6252 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6253 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6254 "have different sizes.");
6255 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6256 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6257 "have different sizes.");
6258
6259 // Slow path used to mark the GC root `root`.
6260 SlowPathCode* slow_path =
6261 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
6262 codegen_->AddSlowPath(slow_path);
6263
6264 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6265 /* no_rip */ true),
6266 Immediate(0));
6267 __ j(kNotEqual, slow_path->GetEntryLabel());
6268 __ Bind(slow_path->GetExitLabel());
6269 } else {
6270 // GC root loaded through a slow path for read barriers other
6271 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006272 // /* GcRoot<mirror::Object>* */ root = address
6273 __ leaq(root_reg, address);
6274 if (fixup_label != nullptr) {
6275 __ Bind(fixup_label);
6276 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006277 // /* mirror::Object* */ root = root->Read()
6278 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6279 }
6280 } else {
6281 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006282 // /* GcRoot<mirror::Object> */ root = *address
6283 __ movl(root_reg, address);
6284 if (fixup_label != nullptr) {
6285 __ Bind(fixup_label);
6286 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006287 // Note that GC roots are not affected by heap poisoning, thus we
6288 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006289 }
6290}
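// The three configurations above, sketched with fixup_label handling elided:
//
//   // Baker read barriers:
//   movl root_reg, [address]                    // fast-path root load
//   gs: cmpl [Thread::IsGcMarkingOffset], 0
//   jne  <ReadBarrierMarkSlowPathX86_64>        // root = Mark(root) when marking
//
//   // Other read barriers:
//   leaq root_reg, [address]                    // pass the root *address*
//   <GenerateReadBarrierForRootSlow>            // root = root->Read()
//
//   // No read barriers:
//   movl root_reg, [address]                    // plain load; roots are never poisoned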
6291
6292void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6293 Location ref,
6294 CpuRegister obj,
6295 uint32_t offset,
6296 Location temp,
6297 bool needs_null_check) {
6298 DCHECK(kEmitCompilerReadBarrier);
6299 DCHECK(kUseBakerReadBarrier);
6300
6301 // /* HeapReference<Object> */ ref = *(obj + offset)
6302 Address src(obj, offset);
6303 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6304}
6305
6306void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6307 Location ref,
6308 CpuRegister obj,
6309 uint32_t data_offset,
6310 Location index,
6311 Location temp,
6312 bool needs_null_check) {
6313 DCHECK(kEmitCompilerReadBarrier);
6314 DCHECK(kUseBakerReadBarrier);
6315
6316 // /* HeapReference<Object> */ ref =
6317 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6318 Address src = index.IsConstant() ?
6319 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6320 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6321 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6322}
6323
6324void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6325 Location ref,
6326 CpuRegister obj,
6327 const Address& src,
6328 Location temp,
6329 bool needs_null_check) {
6330 DCHECK(kEmitCompilerReadBarrier);
6331 DCHECK(kUseBakerReadBarrier);
6332
6333 // In slow path based read barriers, the read barrier call is
6334 // inserted after the original load. However, in fast path based
6335 // Baker's read barriers, we need to perform the load of
6336 // mirror::Object::monitor_ *before* the original reference load.
6337 // This load-load ordering is required by the read barrier.
6338 // The fast path/slow path (for Baker's algorithm) should look like:
6339 //
6340 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6341 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6342 // HeapReference<Object> ref = *src; // Original reference load.
6343 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6344 // if (is_gray) {
6345 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6346 // }
6347 //
6348 // Note: the original implementation in ReadBarrier::Barrier is
6349 // slightly more complex as:
6350 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006351 // the high-bits of rb_state, which are expected to be all zeroes
6352 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6353 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006354 // - it performs additional checks that we do not do here for
6355 // performance reasons.
6356
6357 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6358 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6359 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6360
6361 // /* int32_t */ monitor = obj->monitor_
6362 __ movl(temp_reg, Address(obj, monitor_offset));
6363 if (needs_null_check) {
6364 MaybeRecordImplicitNullCheck(instruction);
6365 }
6366 // /* LockWord */ lock_word = LockWord(monitor)
6367 static_assert(sizeof(LockWord) == sizeof(int32_t),
6368 "art::LockWord and int32_t have different sizes.");
6369 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6370 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6371 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6372 static_assert(
6373 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6374 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6375
6376 // Load fence to prevent load-load reordering.
6377 // Note that this is a no-op, thanks to the x86-64 memory model.
6378 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6379
6380 // The actual reference load.
6381 // /* HeapReference<Object> */ ref = *src
6382 __ movl(ref_reg, src);
6383
6384 // Object* ref = ref_addr->AsMirrorPtr()
6385 __ MaybeUnpoisonHeapReference(ref_reg);
6386
6387 // Slow path used to mark the object `ref` when it is gray.
6388 SlowPathCode* slow_path =
6389 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6390 AddSlowPath(slow_path);
6391
6392 // if (rb_state == ReadBarrier::gray_ptr_)
6393 // ref = ReadBarrier::Mark(ref);
6394 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6395 __ j(kEqual, slow_path->GetEntryLabel());
6396 __ Bind(slow_path->GetExitLabel());
6397}
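// A rough sketch of the Baker fast path emitted above (register and label names
// are illustrative only; the exact encoding depends on the operands):
//
//   movl   temp, [obj + monitor_offset]   // Load the lock word.
//   shrl   temp, kReadBarrierStateShift   // Extract the read barrier state bits.
//   andl   temp, kReadBarrierStateMask
//   // (Load-load fence elided: a no-op under the x86-64 memory model.)
//   movl   ref, src                       // The original reference load.
//   // (Unpoison `ref` here if heap poisoning is enabled.)
//   cmpl   temp, ReadBarrier::gray_ptr_
//   je     mark_slow_path                 // ref = ReadBarrier::Mark(ref), then fall through.
//  mark_slow_path_exit: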
6398
6399void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6400 Location out,
6401 Location ref,
6402 Location obj,
6403 uint32_t offset,
6404 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006405 DCHECK(kEmitCompilerReadBarrier);
6406
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006407 // Insert a slow path based read barrier *after* the reference load.
6408 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006409 // If heap poisoning is enabled, the unpoisoning of the loaded
6410 // reference will be carried out by the runtime within the slow
6411 // path.
6412 //
6413 // Note that `ref` currently does not get unpoisoned (when heap
6414 // poisoning is enabled), which is alright as the `ref` argument is
6415 // not used by the artReadBarrierSlow entry point.
6416 //
6417 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6418 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6419 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6420 AddSlowPath(slow_path);
6421
Roland Levillain0d5a2812015-11-13 10:07:31 +00006422 __ jmp(slow_path->GetEntryLabel());
6423 __ Bind(slow_path->GetExitLabel());
6424}
6425
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006426void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6427 Location out,
6428 Location ref,
6429 Location obj,
6430 uint32_t offset,
6431 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006432 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006433 // Baker's read barriers shall be handled by the fast path
6434 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6435 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006436 // If heap poisoning is enabled, unpoisoning will be taken care of
6437 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006438 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006439 } else if (kPoisonHeapReferences) {
6440 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6441 }
6442}
6443
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006444void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6445 Location out,
6446 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006447 DCHECK(kEmitCompilerReadBarrier);
6448
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006449 // Insert a slow path based read barrier *after* the GC root load.
6450 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006451 // Note that GC roots are not affected by heap poisoning, so we do
6452 // not need to do anything special for this here.
6453 SlowPathCode* slow_path =
6454 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6455 AddSlowPath(slow_path);
6456
Roland Levillain0d5a2812015-11-13 10:07:31 +00006457 __ jmp(slow_path->GetEntryLabel());
6458 __ Bind(slow_path->GetExitLabel());
6459}
6460
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006461void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006462 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006463 LOG(FATAL) << "Unreachable";
6464}
6465
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006466void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006467 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006468 LOG(FATAL) << "Unreachable";
6469}
6470
Mark Mendellfe57faa2015-09-18 09:26:15 -04006471// Simple implementation of packed switch - generate cascaded compare/jumps.
6472void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6473 LocationSummary* locations =
6474 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6475 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006476 locations->AddTemp(Location::RequiresRegister());
6477 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006478}
6479
6480void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6481 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006482 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006483 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006484 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6485 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6486 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006487 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6488
6489 // Should we generate smaller inline compare/jumps?
6490 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6491 // Figure out the correct compare values and jump conditions.
6492 // Handle the first compare/branch as a special case because it might
6493 // jump to the default case.
6494 DCHECK_GT(num_entries, 2u);
6495 Condition first_condition;
6496 uint32_t index;
6497 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6498 if (lower_bound != 0) {
6499 first_condition = kLess;
6500 __ cmpl(value_reg_in, Immediate(lower_bound));
6501 __ j(first_condition, codegen_->GetLabelOf(default_block));
6502 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6503
6504 index = 1;
6505 } else {
6506 // Handle all the compare/jumps below.
6507 first_condition = kBelow;
6508 index = 0;
6509 }
6510
6511 // Handle the rest of the compare/jumps.
6512 for (; index + 1 < num_entries; index += 2) {
6513 int32_t compare_to_value = lower_bound + index + 1;
6514 __ cmpl(value_reg_in, Immediate(compare_to_value));
6515 // Jump to successors[index] if value < case_value[index].
6516 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6517 // Jump to successors[index + 1] if value == case_value[index + 1].
6518 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6519 }
6520
6521 if (index != num_entries) {
6522      // There is an odd number of entries.  Handle the last one.
6523 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00006524 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006525 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6526 }
6527
6528 // And the default for any other value.
6529 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6530 __ jmp(codegen_->GetLabelOf(default_block));
6531 }
6532 return;
6533 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006534
6535 // Remove the bias, if needed.
6536 Register value_reg_out = value_reg_in.AsRegister();
6537 if (lower_bound != 0) {
6538 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6539 value_reg_out = temp_reg.AsRegister();
6540 }
6541 CpuRegister value_reg(value_reg_out);
6542
6543 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006544 __ cmpl(value_reg, Immediate(num_entries - 1));
6545 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006546
Mark Mendell9c86b482015-09-18 13:36:07 -04006547 // We are in the range of the table.
6548 // Load the address of the jump table in the constant area.
6549 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006550
Mark Mendell9c86b482015-09-18 13:36:07 -04006551 // Load the (signed) offset from the jump table.
6552 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6553
6554 // Add the offset to the address of the table base.
6555 __ addq(temp_reg, base_reg);
6556
6557 // And jump.
6558 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006559}
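// For the jump table case above, the emitted dispatch roughly looks like the
// following (register and label names are illustrative; the table address is
// fixed up later by JumpTableRIPFixup):
//
//   leal    temp, [value - lower_bound]        // Only when lower_bound != 0.
//   cmpl    temp, num_entries - 1              // Bounds check against the table size.
//   ja      default_block
//   leaq    base, [rip + jump_table]           // Table lives in the constant area.
//   movsxd  temp, dword ptr [base + temp * 4]  // Signed 32-bit offset to the target.
//   addq    temp, base
//   jmp     temp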
6560
Aart Bikc5d47542016-01-27 17:00:35 -08006561void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6562 if (value == 0) {
6563 __ xorl(dest, dest);
6564 } else {
6565 __ movl(dest, Immediate(value));
6566 }
6567}
6568
Mark Mendell92e83bf2015-05-07 11:25:03 -04006569void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6570 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006571 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006572 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006573 } else if (IsUint<32>(value)) {
6574 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006575 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6576 } else {
6577 __ movq(dest, Immediate(value));
6578 }
6579}
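// Illustrative cases for the 64-bit constant load above (values chosen for
// exposition only):
//   value == 0                    -> xorl dest, dest       (also clears the upper bits)
//   value == 0x00000000FFFFFFFF   -> movl dest, 0xFFFFFFFF (32-bit moves zero-extend,
//                                                           shorter encoding)
//   value == -1 or value >= 2^32  -> movq dest, imm64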
6580
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006581void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6582 if (value == 0) {
6583 __ xorps(dest, dest);
6584 } else {
6585 __ movss(dest, LiteralInt32Address(value));
6586 }
6587}
6588
6589void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6590 if (value == 0) {
6591 __ xorpd(dest, dest);
6592 } else {
6593 __ movsd(dest, LiteralInt64Address(value));
6594 }
6595}
6596
6597void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6598 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6599}
6600
6601void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6602 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6603}
6604
Aart Bika19616e2016-02-01 18:57:58 -08006605void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6606 if (value == 0) {
6607 __ testl(dest, dest);
6608 } else {
6609 __ cmpl(dest, Immediate(value));
6610 }
6611}
6612
6613void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6614 if (IsInt<32>(value)) {
6615 if (value == 0) {
6616 __ testq(dest, dest);
6617 } else {
6618 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6619 }
6620 } else {
6621     // Value won't fit in an int32_t immediate.
6622 __ cmpq(dest, LiteralInt64Address(value));
6623 }
6624}
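// Note on the 64-bit compare above: cmpq only takes a sign-extended 32-bit
// immediate, so values outside the int32 range are compared against a 64-bit
// literal in the RIP-addressed constant area instead. Illustrative cases:
//   value == 0                   -> testq dest, dest
//   value == 123456              -> cmpq dest, 123456
//   value == 0x123456789ABCDEF0  -> cmpq dest, [rip + int64 literal]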
6625
Mark Mendellcfa410b2015-05-25 16:02:44 -04006626void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6627 DCHECK(dest.IsDoubleStackSlot());
6628 if (IsInt<32>(value)) {
6629 // Can move directly as an int32 constant.
6630 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6631 Immediate(static_cast<int32_t>(value)));
6632 } else {
6633 Load64BitValue(CpuRegister(TMP), value);
6634 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6635 }
6636}
6637
Mark Mendell9c86b482015-09-18 13:36:07 -04006638/**
6639  * Class to handle late fixup of offsets into the constant area.
6640 */
6641class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6642 public:
6643 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6644 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6645
6646 protected:
6647 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6648
6649 CodeGeneratorX86_64* codegen_;
6650
6651 private:
6652 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6653 // Patch the correct offset for the instruction. We use the address of the
6654 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6655 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6656 int32_t relative_position = constant_offset - pos;
6657
6658 // Patch in the right value.
6659 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6660 }
6661
6662 // Location in constant area that the fixup refers to.
6663 size_t offset_into_constant_area_;
6664};
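// A worked example of the fixup arithmetic above (all numbers illustrative): if the
// constant area starts at code offset 0x200, the literal sits 8 bytes into it, and
// the displacement field ends at pos == 0x40, then the 4 bytes at pos - 4 are
// patched with 0x200 + 8 - 0x40 = 0x1C8. Since RIP-relative addressing is computed
// from the end of the instruction (pos), the displacement resolves to the literal's
// final location at offset 0x208.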
6665
6666/**
6667  * Class to handle late fixup of offsets to a jump table that will be created in the
6668 * constant area.
6669 */
6670class JumpTableRIPFixup : public RIPFixup {
6671 public:
6672 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6673 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6674
6675 void CreateJumpTable() {
6676 X86_64Assembler* assembler = codegen_->GetAssembler();
6677
6678 // Ensure that the reference to the jump table has the correct offset.
6679 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6680 SetOffset(offset_in_constant_table);
6681
6682 // Compute the offset from the start of the function to this jump table.
6683 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6684
6685     // Populate the jump table with the offset to each successor block.
6686 int32_t num_entries = switch_instr_->GetNumEntries();
6687 HBasicBlock* block = switch_instr_->GetBlock();
6688 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6689 // The value that we want is the target offset - the position of the table.
6690 for (int32_t i = 0; i < num_entries; i++) {
6691 HBasicBlock* b = successors[i];
6692 Label* l = codegen_->GetLabelOf(b);
6693 DCHECK(l->IsBound());
6694 int32_t offset_to_block = l->Position() - current_table_offset;
6695 assembler->AppendInt32(offset_to_block);
6696 }
6697 }
6698
6699 private:
6700 const HPackedSwitch* switch_instr_;
6701};
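// Layout note (a sketch, not additional emitted data): each jump table is appended
// to the constant area as num_entries int32 values, where entry i holds
//   successors[i] label position - (code size at fixup time + offset of the table
//   within the constant area),
// i.e. the signed distance from the start of the table to the target block. The
// dispatch sequence in VisitPackedSwitch adds this offset back to the table base
// register before jumping.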
6702
Mark Mendellf55c3e02015-03-26 21:07:46 -04006703void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6704 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006705 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006706 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6707 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006708 assembler->Align(4, 0);
6709 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006710
6711 // Populate any jump tables.
6712 for (auto jump_table : fixups_to_jump_tables_) {
6713 jump_table->CreateJumpTable();
6714 }
6715
6716 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006717 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006718 }
6719
6720 // And finish up.
6721 CodeGenerator::Finalize(allocator);
6722}
6723
Mark Mendellf55c3e02015-03-26 21:07:46 -04006724Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6725 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6726 return Address::RIP(fixup);
6727}
6728
6729Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6730 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6731 return Address::RIP(fixup);
6732}
6733
6734Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6735 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6736 return Address::RIP(fixup);
6737}
6738
6739Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6740 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6741 return Address::RIP(fixup);
6742}
6743
Andreas Gampe85b62f22015-09-09 13:15:38 -07006744// TODO: trg as memory.
6745void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6746 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006747 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006748 return;
6749 }
6750
6751 DCHECK_NE(type, Primitive::kPrimVoid);
6752
6753 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6754 if (trg.Equals(return_loc)) {
6755 return;
6756 }
6757
6758 // Let the parallel move resolver take care of all of this.
6759 HParallelMove parallel_move(GetGraph()->GetArena());
6760 parallel_move.AddMove(return_loc, trg, type, nullptr);
6761 GetMoveResolver()->EmitNativeCode(&parallel_move);
6762}
6763
Mark Mendell9c86b482015-09-18 13:36:07 -04006764Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6765   // Create a fixup that will be used both to create the jump table and to address it.
6766 JumpTableRIPFixup* table_fixup =
6767 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6768
6769 // We have to populate the jump tables.
6770 fixups_to_jump_tables_.push_back(table_fixup);
6771 return Address::RIP(table_fixup);
6772}
6773
Mark Mendellea5af682015-10-22 17:35:49 -04006774void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6775 const Address& addr_high,
6776 int64_t v,
6777 HInstruction* instruction) {
6778 if (IsInt<32>(v)) {
6779 int32_t v_32 = v;
6780 __ movq(addr_low, Immediate(v_32));
6781 MaybeRecordImplicitNullCheck(instruction);
6782 } else {
6783     // Didn't fit in a sign-extended 32-bit immediate.  Do it in two 32-bit pieces.
6784 int32_t low_v = Low32Bits(v);
6785 int32_t high_v = High32Bits(v);
6786 __ movl(addr_low, Immediate(low_v));
6787 MaybeRecordImplicitNullCheck(instruction);
6788 __ movl(addr_high, Immediate(high_v));
6789 }
6790}
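// Example of the split above (value illustrative): storing 0x0000000123456789, which
// does not fit a sign-extended 32-bit immediate, writes 0x23456789 to addr_low first
// (so any implicit null check is recorded on that store) and then 0x00000001 to
// addr_high.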
6791
Roland Levillain4d027112015-07-01 15:41:14 +01006792#undef __
6793
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006794} // namespace x86_64
6795} // namespace art