/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

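// Mask for the C2 flag (bit 10) of the x87 FPU status word, checked after fprem to
// detect an incomplete partial-remainder computation.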
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

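// Slow path for integer/long division or remainder when the divisor is -1: idiv would fault
// on MIN_VALUE / -1, so the result is computed directly (negation for div, zero for rem).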
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, size_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckCast, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location obj, bool unpoison)
      : SlowPathCode(instruction), obj_(obj), unpoison_(unpoison) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg = obj_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(obj_.AsRegister<CpuRegister>());
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(reg, RSP);
    DCHECK(0 <= reg && reg < kNumberOfCpuRegisters) << reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI and output in RAX):
    //
    //   RDI <- obj
    //   RAX <- ReadBarrierMark(RDI)
    //   obj <- RAX
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  const Location obj_;
  const bool unpoison_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT

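// Maps integer condition to x86_64 name.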
Roland Levillain4fa13f62015-07-06 18:11:54 +0100706inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700707 switch (cond) {
708 case kCondEQ: return kEqual;
709 case kCondNE: return kNotEqual;
710 case kCondLT: return kLess;
711 case kCondLE: return kLessEqual;
712 case kCondGT: return kGreater;
713 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700714 case kCondB: return kBelow;
715 case kCondBE: return kBelowEqual;
716 case kCondA: return kAbove;
717 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700718 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100719 LOG(FATAL) << "Unreachable";
720 UNREACHABLE();
721}
722
Aart Bike9f37602015-10-09 11:15:55 -0700723// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100724inline Condition X86_64FPCondition(IfCondition cond) {
725 switch (cond) {
726 case kCondEQ: return kEqual;
727 case kCondNE: return kNotEqual;
728 case kCondLT: return kBelow;
729 case kCondLE: return kBelowEqual;
730 case kCondGT: return kAbove;
731 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700732 default: break; // should not happen
Roland Levillain4fa13f62015-07-06 18:11:54 +0100733 };
734 LOG(FATAL) << "Unreachable";
735 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700736}
737
Vladimir Markodc151b22015-10-15 18:02:30 +0100738HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
739 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
740 MethodReference target_method ATTRIBUTE_UNUSED) {
741 switch (desired_dispatch_info.code_ptr_location) {
742 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
743 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
744 // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
745 return HInvokeStaticOrDirect::DispatchInfo {
746 desired_dispatch_info.method_load_kind,
747 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
748 desired_dispatch_info.method_load_data,
749 0u
750 };
751 default:
752 return desired_dispatch_info;
753 }
754}
755
Serguei Katkov288c7a82016-05-16 11:53:15 +0600756Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
757 Location temp) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -0800758 // All registers are assumed to be correctly set up.
Vladimir Marko58155012015-08-19 12:49:41 +0000759 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
760 switch (invoke->GetMethodLoadKind()) {
761 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
762 // temp = thread->string_init_entrypoint
Nicolas Geoffray7f59d592015-12-29 16:20:52 +0000763 __ gs()->movq(temp.AsRegister<CpuRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000764 Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
Vladimir Marko58155012015-08-19 12:49:41 +0000765 break;
766 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +0000767 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +0000768 break;
769 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
770 __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
771 break;
772 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
773 __ movl(temp.AsRegister<CpuRegister>(), Immediate(0)); // Placeholder.
774 method_patches_.emplace_back(invoke->GetTargetMethod());
775 __ Bind(&method_patches_.back().label); // Bind the label at the end of the "movl" insn.
776 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000777 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
Vladimir Marko58155012015-08-19 12:49:41 +0000778 __ movq(temp.AsRegister<CpuRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000779 Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000780 // Bind a new fixup label at the end of the "movl" insn.
781 uint32_t offset = invoke->GetDexCacheArrayOffset();
782 __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
Vladimir Marko58155012015-08-19 12:49:41 +0000783 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000784 }
Vladimir Marko58155012015-08-19 12:49:41 +0000785 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +0000786 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +0000787 Register method_reg;
788 CpuRegister reg = temp.AsRegister<CpuRegister>();
789 if (current_method.IsRegister()) {
790 method_reg = current_method.AsRegister<Register>();
791 } else {
792 DCHECK(invoke->GetLocations()->Intrinsified());
793 DCHECK(!current_method.IsValid());
794 method_reg = reg.AsRegister();
795 __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
796 }
Roland Levillain0d5a2812015-11-13 10:07:31 +0000797 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +0100798 __ movq(reg,
799 Address(CpuRegister(method_reg),
800 ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
Vladimir Marko40ecb122016-04-06 17:33:41 +0100801 // temp = temp[index_in_cache];
802 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
803 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +0000804 __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
805 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +0100806 }
Vladimir Marko58155012015-08-19 12:49:41 +0000807 }
Serguei Katkov288c7a82016-05-16 11:53:15 +0600808 return callee_method;
809}
810
811void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
812 Location temp) {
813 // All registers are assumed to be correctly set up.
814 Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
Vladimir Marko58155012015-08-19 12:49:41 +0000815
816 switch (invoke->GetCodePtrLocation()) {
817 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
818 __ call(&frame_entry_label_);
819 break;
820 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
821 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
822 Label* label = &relative_call_patches_.back().label;
823 __ call(label); // Bind to the patch label, override at link time.
824 __ Bind(label); // Bind the label at the end of the "call" insn.
825 break;
826 }
827 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
828 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
Vladimir Markodc151b22015-10-15 18:02:30 +0100829 // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
830 LOG(FATAL) << "Unsupported";
831 UNREACHABLE();
Vladimir Marko58155012015-08-19 12:49:41 +0000832 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
833 // (callee_method + offset_of_quick_compiled_code)()
834 __ call(Address(callee_method.AsRegister<CpuRegister>(),
835 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -0700836 kX86_64PointerSize).SizeValue()));
Vladimir Marko58155012015-08-19 12:49:41 +0000837 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +0000838 }
Andreas Gampe71fb52f2014-12-29 17:43:08 -0800839
840 DCHECK(!IsLeafMethod());
Andreas Gampe71fb52f2014-12-29 17:43:08 -0800841}
842
Andreas Gampebfb5ba92015-09-01 15:45:02 +0000843void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
844 CpuRegister temp = temp_in.AsRegister<CpuRegister>();
845 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
846 invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffraye5234232015-12-02 09:06:11 +0000847
848 // Use the calling convention instead of the location of the receiver, as
849 // intrinsics may have put the receiver in a different register. In the intrinsics
850 // slow path, the arguments have been moved to the right place, so here we are
851 // guaranteed that the receiver is the first register of the calling convention.
852 InvokeDexCallingConvention calling_convention;
853 Register receiver = calling_convention.GetRegisterAt(0);
854
Andreas Gampebfb5ba92015-09-01 15:45:02 +0000855 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000856 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +0000857 __ movl(temp, Address(CpuRegister(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +0000858 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000859 // Instead of simply (possibly) unpoisoning `temp` here, we should
860 // emit a read barrier for the previous class reference load.
861 // However this is not required in practice, as this is an
862 // intermediate/temporary reference and because the current
863 // concurrent copying collector keeps the from-space memory
864 // intact/accessible until the end of the marking phase (the
865 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +0000866 __ MaybeUnpoisonHeapReference(temp);
867 // temp = temp->GetMethodAt(method_offset);
868 __ movq(temp, Address(temp, method_offset));
869 // call temp->GetEntryPoint();
870 __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -0700871 kX86_64PointerSize).SizeValue()));
Andreas Gampebfb5ba92015-09-01 15:45:02 +0000872}
873
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000874void CodeGeneratorX86_64::RecordSimplePatch() {
875 if (GetCompilerOptions().GetIncludePatchInformation()) {
876 simple_patches_.emplace_back();
877 __ Bind(&simple_patches_.back());
878 }
879}
880
881void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
882 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
883 __ Bind(&string_patches_.back().label);
884}
885
Vladimir Markodbb7f5b2016-03-30 13:23:58 +0100886void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
887 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
888 __ Bind(&type_patches_.back().label);
889}
890
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000891Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
892 uint32_t element_offset) {
893 // Add a patch entry and return the label.
894 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
895 return &pc_relative_dex_cache_patches_.back().label;
896}
897
Vladimir Marko58155012015-08-19 12:49:41 +0000898void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
899 DCHECK(linker_patches->empty());
900 size_t size =
Vladimir Marko0f7dca42015-11-02 14:36:43 +0000901 method_patches_.size() +
902 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000903 pc_relative_dex_cache_patches_.size() +
904 simple_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +0100905 string_patches_.size() +
906 type_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +0000907 linker_patches->reserve(size);
Vladimir Marko0f7dca42015-11-02 14:36:43 +0000908 // The label points to the end of the "movl" insn but the literal offset for method
909 // patch needs to point to the embedded constant which occupies the last 4 bytes.
910 constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
Vladimir Marko58155012015-08-19 12:49:41 +0000911 for (const MethodPatchInfo<Label>& info : method_patches_) {
Vladimir Marko0f7dca42015-11-02 14:36:43 +0000912 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Marko58155012015-08-19 12:49:41 +0000913 linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
914 info.target_method.dex_file,
915 info.target_method.dex_method_index));
916 }
917 for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
Vladimir Marko0f7dca42015-11-02 14:36:43 +0000918 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Marko58155012015-08-19 12:49:41 +0000919 linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
920 info.target_method.dex_file,
921 info.target_method.dex_method_index));
922 }
Vladimir Marko0f7dca42015-11-02 14:36:43 +0000923 for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
924 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Marko58155012015-08-19 12:49:41 +0000925 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
926 &info.target_dex_file,
927 info.label.Position(),
928 info.element_offset));
929 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000930 for (const Label& label : simple_patches_) {
931 uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
932 linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
933 }
934 for (const StringPatchInfo<Label>& info : string_patches_) {
935 // These are always PC-relative, see GetSupportedLoadStringKind().
936 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
937 linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
938 &info.dex_file,
939 info.label.Position(),
940 info.string_index));
941 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +0100942 for (const TypePatchInfo<Label>& info : type_patches_) {
943 // These are always PC-relative, see GetSupportedLoadClassKind().
944 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
945 linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset,
946 &info.dex_file,
947 info.label.Position(),
948 info.type_index));
949 }
Vladimir Marko58155012015-08-19 12:49:41 +0000950}
951
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100952void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100953 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100954}
955
956void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100957 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100958}
959
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100960size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
961 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
962 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100963}
964
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100965size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
966 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
967 return kX86_64WordSize;
968}
969
970size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
971 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
972 return kX86_64WordSize;
973}
974
975size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
976 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
977 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100978}
979
Calin Juravle175dc732015-08-25 15:42:32 +0100980void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
981 HInstruction* instruction,
982 uint32_t dex_pc,
983 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +0100984 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100985 GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
986 if (EntrypointRequiresStackMap(entrypoint)) {
987 RecordPcInfo(instruction, dex_pc, slow_path);
988 }
Alexandre Rames8158f282015-08-07 10:26:17 +0100989}
990
Roland Levillaindec8f632016-07-22 17:10:06 +0100991void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
992 HInstruction* instruction,
993 SlowPathCode* slow_path) {
994 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100995 GenerateInvokeRuntime(entry_point_offset);
996}
997
998void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +0100999 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
1000}
1001
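// x86-64 has no register pairs: every core register is 64 bits wide, so long values
// never need to be split across two registers.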
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001002static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001003// Use a fake return address register to mimic Quick.
1004static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
Mark Mendellfb8d2792015-03-31 22:16:59 -04001005CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001006 const X86_64InstructionSetFeatures& isa_features,
1007 const CompilerOptions& compiler_options,
1008 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +00001009 : CodeGenerator(graph,
1010 kNumberOfCpuRegisters,
1011 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001012 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001013 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1014 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001015 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001016 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1017 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001018 compiler_options,
1019 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001020 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001021 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001022 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -04001023 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001024 assembler_(graph->GetArena()),
Mark Mendellf55c3e02015-03-26 21:07:46 -04001025 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +00001026 constant_area_start_(0),
Vladimir Marko5233f932015-09-29 19:01:15 +01001027 method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1028 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0f7dca42015-11-02 14:36:43 +00001029 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001030 simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1031 string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001032 type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Mark Mendell9c86b482015-09-18 13:36:07 -04001033 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001034 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
1035}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001036
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001037InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1038 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001039 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001040 assembler_(codegen->GetAssembler()),
1041 codegen_(codegen) {}
1042
David Brazdil58282f42016-01-14 12:45:10 +00001043void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001044 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001045 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001046
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001047 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001048 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001049}
1050
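// Map architectural registers to their DWARF register numbers for the CFI (call frame
// information) directives emitted in the prologue and epilogue.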
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001051static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001052 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001053}
David Srbecky9d8606d2015-04-12 09:35:32 +01001054
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001055static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001056 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001057}
1058
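// Frame entry: an implicit stack-overflow probe (a read at RSP minus the reserved gap)
// is emitted unless the method is a leaf with a small enough frame; callee-saved core
// registers are pushed, the rest of the frame is allocated with a single subq (XMM
// callee-saves are spilled into it), and the current ArtMethod* is stored at RSP + 0.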
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001059void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001060 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001061 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001062 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001063 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001064 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001065
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001066 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001067 __ testq(CpuRegister(RAX), Address(
1068 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001069 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001070 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001071
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001072 if (HasEmptyFrame()) {
1073 return;
1074 }
1075
Nicolas Geoffray98893962015-01-21 12:32:32 +00001076 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001077 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001078 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001079 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001080 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1081 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001082 }
1083 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001084
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001085 int adjust = GetFrameSize() - GetCoreSpillSize();
1086 __ subq(CpuRegister(RSP), Immediate(adjust));
1087 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001088 uint32_t xmm_spill_location = GetFpuSpillStart();
1089 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001090
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001091 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1092 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001093 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1094 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1095 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001096 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001097 }
1098
Mathieu Chartiere401d142015-04-22 13:56:20 -07001099 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001100 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001101}
1102
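// Mirror image of GenerateFrameEntry: reload the XMM callee-saves, undo the stack
// adjustment, pop the core callee-saves and return. The CFI state is remembered and
// restored around the epilogue so unwind info stays correct for code after the `ret`.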
1103void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001104 __ cfi().RememberState();
1105 if (!HasEmptyFrame()) {
1106 uint32_t xmm_spill_location = GetFpuSpillStart();
1107 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1108 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1109 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1110 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1111 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1112 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1113 }
1114 }
1115
1116 int adjust = GetFrameSize() - GetCoreSpillSize();
1117 __ addq(CpuRegister(RSP), Immediate(adjust));
1118 __ cfi().AdjustCFAOffset(-adjust);
1119
1120 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1121 Register reg = kCoreCalleeSaves[i];
1122 if (allocated_registers_.ContainsCoreRegister(reg)) {
1123 __ popq(CpuRegister(reg));
1124 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1125 __ cfi().Restore(DWARFReg(reg));
1126 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001127 }
1128 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001129 __ ret();
1130 __ cfi().RestoreState();
1131 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001132}
1133
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001134void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1135 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001136}
1137
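// Generic location-to-location move covering register, FP register, stack slot and
// constant sources; memory-to-memory moves go through the reserved TMP register.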
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001138void CodeGeneratorX86_64::Move(Location destination, Location source) {
1139 if (source.Equals(destination)) {
1140 return;
1141 }
1142 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001143 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001144 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001145 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001146 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001147 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001148 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001149 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1150 } else if (source.IsConstant()) {
1151 HConstant* constant = source.GetConstant();
1152 if (constant->IsLongConstant()) {
1153 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1154 } else {
1155 Load32BitValue(dest, GetInt32ValueOf(constant));
1156 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001157 } else {
1158 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001159 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001160 }
1161 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001162 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001163 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001164 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001165 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001166 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1167 } else if (source.IsConstant()) {
1168 HConstant* constant = source.GetConstant();
1169 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1170 if (constant->IsFloatConstant()) {
1171 Load32BitValue(dest, static_cast<int32_t>(value));
1172 } else {
1173 Load64BitValue(dest, value);
1174 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001175 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001176 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001177 } else {
1178 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001179 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001180 }
1181 } else if (destination.IsStackSlot()) {
1182 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001183 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001184 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001185 } else if (source.IsFpuRegister()) {
1186 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001187 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001188 } else if (source.IsConstant()) {
1189 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001190 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001191 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001192 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001193 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001194 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1195 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001196 }
1197 } else {
1198 DCHECK(destination.IsDoubleStackSlot());
1199 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001200 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001201 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001202 } else if (source.IsFpuRegister()) {
1203 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001204 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001205 } else if (source.IsConstant()) {
1206 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001207 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1208 int64_t value = GetInt64ValueOf(constant);
Mark Mendellcfa410b2015-05-25 16:02:44 -04001209 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001210 } else {
1211 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001212 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1213 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001214 }
1215 }
1216}
1217
Calin Juravle175dc732015-08-25 15:42:32 +01001218void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1219 DCHECK(location.IsRegister());
1220 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1221}
1222
Calin Juravlee460d1d2015-09-29 04:52:17 +01001223void CodeGeneratorX86_64::MoveLocation(
1224 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1225 Move(dst, src);
1226}
1227
1228void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1229 if (location.IsRegister()) {
1230 locations->AddTemp(location);
1231 } else {
1232 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1233 }
1234}
1235
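// Unconditional branches: back edges with a pending suspend check delegate to
// GenerateSuspendCheck (passing the successor so control flow stays correct); otherwise
// the jump is omitted when the successor is the next block in program order.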
David Brazdilfc6a86a2015-06-26 10:33:45 +00001236void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001237 DCHECK(!successor->IsExitBlock());
1238
1239 HBasicBlock* block = got->GetBlock();
1240 HInstruction* previous = got->GetPrevious();
1241
1242 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001243 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001244 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1245 return;
1246 }
1247
1248 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1249 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1250 }
1251 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001252 __ jmp(codegen_->GetLabelOf(successor));
1253 }
1254}
1255
David Brazdilfc6a86a2015-06-26 10:33:45 +00001256void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1257 got->SetLocations(nullptr);
1258}
1259
1260void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1261 HandleGoto(got, got->GetSuccessor());
1262}
1263
1264void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1265 try_boundary->SetLocations(nullptr);
1266}
1267
1268void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1269 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1270 if (!successor->IsExitBlock()) {
1271 HandleGoto(try_boundary, successor);
1272 }
1273}
1274
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001275void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1276 exit->SetLocations(nullptr);
1277}
1278
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001279void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001280}
1281
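// FP branches: ucomiss/ucomisd report an unordered result (NaN) separately, so the
// unordered case is dispatched to the true or false label first, according to the
// condition's NaN bias, before the ordered condition is tested.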
Mark Mendell152408f2015-12-31 12:28:50 -05001282template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001283void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001284 LabelType* true_label,
1285 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001286 if (cond->IsFPConditionTrueIfNaN()) {
1287 __ j(kUnordered, true_label);
1288 } else if (cond->IsFPConditionFalseIfNaN()) {
1289 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001290 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001291 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001292}
1293
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001294void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001295 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001296
Mark Mendellc4701932015-04-10 13:18:51 -04001297 Location left = locations->InAt(0);
1298 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001299 Primitive::Type type = condition->InputAt(0)->GetType();
1300 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001301 case Primitive::kPrimBoolean:
1302 case Primitive::kPrimByte:
1303 case Primitive::kPrimChar:
1304 case Primitive::kPrimShort:
1305 case Primitive::kPrimInt:
1306 case Primitive::kPrimNot: {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001307 codegen_->GenerateIntCompare(left, right);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001308 break;
1309 }
Mark Mendellc4701932015-04-10 13:18:51 -04001310 case Primitive::kPrimLong: {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001311 codegen_->GenerateLongCompare(left, right);
Mark Mendellc4701932015-04-10 13:18:51 -04001312 break;
1313 }
1314 case Primitive::kPrimFloat: {
1315 if (right.IsFpuRegister()) {
1316 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1317 } else if (right.IsConstant()) {
1318 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1319 codegen_->LiteralFloatAddress(
1320 right.GetConstant()->AsFloatConstant()->GetValue()));
1321 } else {
1322 DCHECK(right.IsStackSlot());
1323 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1324 Address(CpuRegister(RSP), right.GetStackIndex()));
1325 }
Mark Mendellc4701932015-04-10 13:18:51 -04001326 break;
1327 }
1328 case Primitive::kPrimDouble: {
1329 if (right.IsFpuRegister()) {
1330 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1331 } else if (right.IsConstant()) {
1332 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1333 codegen_->LiteralDoubleAddress(
1334 right.GetConstant()->AsDoubleConstant()->GetValue()));
1335 } else {
1336 DCHECK(right.IsDoubleStackSlot());
1337 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1338 Address(CpuRegister(RSP), right.GetStackIndex()));
1339 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001340 break;
1341 }
1342 default:
1343 LOG(FATAL) << "Unexpected condition type " << type;
1344 }
1345}
1346
1347template<class LabelType>
1348void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1349 LabelType* true_target_in,
1350 LabelType* false_target_in) {
1351 // Generated branching requires both targets to be explicit. If either of the
1352 // targets is nullptr (a fallthrough), use and bind `fallthrough_target` instead.
1353 LabelType fallthrough_target;
1354 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1355 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1356
1357 // Generate the comparison to set the CC.
1358 GenerateCompareTest(condition);
1359
1360 // Now generate the correct jump(s).
1361 Primitive::Type type = condition->InputAt(0)->GetType();
1362 switch (type) {
1363 case Primitive::kPrimLong: {
1364 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1365 break;
1366 }
1367 case Primitive::kPrimFloat: {
1368 GenerateFPJumps(condition, true_target, false_target);
1369 break;
1370 }
1371 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001372 GenerateFPJumps(condition, true_target, false_target);
1373 break;
1374 }
1375 default:
1376 LOG(FATAL) << "Unexpected condition type " << type;
1377 }
1378
David Brazdil0debae72015-11-12 18:37:00 +00001379 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001380 __ jmp(false_target);
1381 }
David Brazdil0debae72015-11-12 18:37:00 +00001382
1383 if (fallthrough_target.IsLinked()) {
1384 __ Bind(&fallthrough_target);
1385 }
Mark Mendellc4701932015-04-10 13:18:51 -04001386}
1387
David Brazdil0debae72015-11-12 18:37:00 +00001388static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1389 // Moves may affect the eflags register (moving zero uses xorl), so the eflags can only
1390 // be relied upon when `cond` is the instruction immediately before `branch`. We can't use
1391 // the eflags on long conditions if they are materialized due to the complex branching.
1392 return cond->IsCondition() &&
1393 cond->GetNext() == branch &&
1394 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1395}
1396
Mark Mendell152408f2015-12-31 12:28:50 -05001397template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001398void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001399 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001400 LabelType* true_target,
1401 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001402 HInstruction* cond = instruction->InputAt(condition_input_index);
1403
1404 if (true_target == nullptr && false_target == nullptr) {
1405 // Nothing to do. The code always falls through.
1406 return;
1407 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001408 // Constant condition, statically compared against "true" (integer value 1).
1409 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001410 if (true_target != nullptr) {
1411 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001412 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001413 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001414 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001415 if (false_target != nullptr) {
1416 __ jmp(false_target);
1417 }
1418 }
1419 return;
1420 }
1421
1422 // The following code generates these patterns:
1423 // (1) true_target == nullptr && false_target != nullptr
1424 // - opposite condition true => branch to false_target
1425 // (2) true_target != nullptr && false_target == nullptr
1426 // - condition true => branch to true_target
1427 // (3) true_target != nullptr && false_target != nullptr
1428 // - condition true => branch to true_target
1429 // - branch to false_target
1430 if (IsBooleanValueOrMaterializedCondition(cond)) {
1431 if (AreEflagsSetFrom(cond, instruction)) {
1432 if (true_target == nullptr) {
1433 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1434 } else {
1435 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1436 }
1437 } else {
1438 // Materialized condition, compare against 0.
1439 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1440 if (lhs.IsRegister()) {
1441 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1442 } else {
1443 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1444 }
1445 if (true_target == nullptr) {
1446 __ j(kEqual, false_target);
1447 } else {
1448 __ j(kNotEqual, true_target);
1449 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001450 }
1451 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001452 // Condition has not been materialized, use its inputs as the
1453 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001454 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001455
David Brazdil0debae72015-11-12 18:37:00 +00001456 // If this is a long or FP comparison that has been folded into
1457 // the HCondition, generate the comparison directly.
1458 Primitive::Type type = condition->InputAt(0)->GetType();
1459 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1460 GenerateCompareTestAndBranch(condition, true_target, false_target);
1461 return;
1462 }
1463
1464 Location lhs = condition->GetLocations()->InAt(0);
1465 Location rhs = condition->GetLocations()->InAt(1);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001466 codegen_->GenerateIntCompare(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00001467 if (true_target == nullptr) {
1468 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1469 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001470 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001471 }
Dave Allison20dfc792014-06-16 20:44:29 -07001472 }
David Brazdil0debae72015-11-12 18:37:00 +00001473
1474 // If neither branch falls through (case 3), the conditional branch to `true_target`
1475 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1476 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001477 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001478 }
1479}
1480
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001481void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001482 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1483 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001484 locations->SetInAt(0, Location::Any());
1485 }
1486}
1487
1488void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001489 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1490 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1491 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1492 nullptr : codegen_->GetLabelOf(true_successor);
1493 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1494 nullptr : codegen_->GetLabelOf(false_successor);
1495 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001496}
1497
1498void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1499 LocationSummary* locations = new (GetGraph()->GetArena())
1500 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko239d6ea2016-09-05 10:44:04 +01001501 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00001502 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001503 locations->SetInAt(0, Location::Any());
1504 }
1505}
1506
1507void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001508 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001509 GenerateTestAndBranch<Label>(deoptimize,
1510 /* condition_input_index */ 0,
1511 slow_path->GetEntryLabel(),
1512 /* false_target */ nullptr);
1513}
1514
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001515static bool SelectCanUseCMOV(HSelect* select) {
1516 // There are no conditional move instructions for XMMs.
1517 if (Primitive::IsFloatingPointType(select->GetType())) {
1518 return false;
1519 }
1520
1521 // A FP condition doesn't generate the single CC that we need.
1522 HInstruction* condition = select->GetCondition();
1523 if (condition->IsCondition() &&
1524 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1525 return false;
1526 }
1527
1528 // We can generate a CMOV for this Select.
1529 return true;
1530}
1531
David Brazdil74eb1b22015-12-14 11:44:01 +00001532void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1533 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1534 if (Primitive::IsFloatingPointType(select->GetType())) {
1535 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001536 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001537 } else {
1538 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001539 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001540 if (select->InputAt(1)->IsConstant()) {
1541 locations->SetInAt(1, Location::RequiresRegister());
1542 } else {
1543 locations->SetInAt(1, Location::Any());
1544 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001545 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001546 locations->SetInAt(1, Location::Any());
1547 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001548 }
1549 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1550 locations->SetInAt(2, Location::RequiresRegister());
1551 }
1552 locations->SetOut(Location::SameAsFirstInput());
1553}
1554
1555void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1556 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001557 if (SelectCanUseCMOV(select)) {
1558 // If both the condition and the source types are integer, we can generate
1559 // a CMOV to implement Select.
1560 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001561 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001562 DCHECK(locations->InAt(0).Equals(locations->Out()));
1563
1564 HInstruction* select_condition = select->GetCondition();
1565 Condition cond = kNotEqual;
1566
1567 // Figure out how to test the 'condition'.
1568 if (select_condition->IsCondition()) {
1569 HCondition* condition = select_condition->AsCondition();
1570 if (!condition->IsEmittedAtUseSite()) {
1571 // This was a previously materialized condition.
1572 // Can we use the existing condition code?
1573 if (AreEflagsSetFrom(condition, select)) {
1574 // Materialization was the previous instruction. Condition codes are right.
1575 cond = X86_64IntegerCondition(condition->GetCondition());
1576 } else {
1577 // No, we have to recreate the condition code.
1578 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1579 __ testl(cond_reg, cond_reg);
1580 }
1581 } else {
1582 GenerateCompareTest(condition);
1583 cond = X86_64IntegerCondition(condition->GetCondition());
1584 }
1585 } else {
1586 // Must be a boolean condition, which needs to be compared to 0.
1587 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1588 __ testl(cond_reg, cond_reg);
1589 }
1590
1591 // If the condition is true, overwrite the output, which already contains false.
1592 // Generate the correct sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001593 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1594 if (value_true_loc.IsRegister()) {
1595 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1596 } else {
1597 __ cmov(cond,
1598 value_false,
1599 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1600 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001601 } else {
1602 NearLabel false_target;
1603 GenerateTestAndBranch<NearLabel>(select,
1604 /* condition_input_index */ 2,
1605 /* true_target */ nullptr,
1606 &false_target);
1607 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1608 __ Bind(&false_target);
1609 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001610}
1611
David Srbecky0cf44932015-12-09 14:09:59 +00001612void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1613 new (GetGraph()->GetArena()) LocationSummary(info);
1614}
1615
David Srbeckyd28f4a02016-03-14 17:14:24 +00001616void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1617 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001618}
1619
1620void CodeGeneratorX86_64::GenerateNop() {
1621 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001622}
1623
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001624void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001625 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001626 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001627 // Handle the long/FP comparisons made in instruction simplification.
1628 switch (cond->InputAt(0)->GetType()) {
1629 case Primitive::kPrimLong:
1630 locations->SetInAt(0, Location::RequiresRegister());
1631 locations->SetInAt(1, Location::Any());
1632 break;
1633 case Primitive::kPrimFloat:
1634 case Primitive::kPrimDouble:
1635 locations->SetInAt(0, Location::RequiresFpuRegister());
1636 locations->SetInAt(1, Location::Any());
1637 break;
1638 default:
1639 locations->SetInAt(0, Location::RequiresRegister());
1640 locations->SetInAt(1, Location::Any());
1641 break;
1642 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001643 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001644 locations->SetOut(Location::RequiresRegister());
1645 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001646}
1647
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001648void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001649 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001650 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001651 }
Mark Mendellc4701932015-04-10 13:18:51 -04001652
1653 LocationSummary* locations = cond->GetLocations();
1654 Location lhs = locations->InAt(0);
1655 Location rhs = locations->InAt(1);
1656 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001657 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001658
1659 switch (cond->InputAt(0)->GetType()) {
1660 default:
1661 // Integer case.
1662
1663 // Clear output register: setcc only sets the low byte.
1664 __ xorl(reg, reg);
1665
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001666 codegen_->GenerateIntCompare(lhs, rhs);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001667 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001668 return;
1669 case Primitive::kPrimLong:
1670 // Clear output register: setcc only sets the low byte.
1671 __ xorl(reg, reg);
1672
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001673 codegen_->GenerateLongCompare(lhs, rhs);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001674 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001675 return;
1676 case Primitive::kPrimFloat: {
1677 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1678 if (rhs.IsConstant()) {
1679 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1680 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1681 } else if (rhs.IsStackSlot()) {
1682 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1683 } else {
1684 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1685 }
1686 GenerateFPJumps(cond, &true_label, &false_label);
1687 break;
1688 }
1689 case Primitive::kPrimDouble: {
1690 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1691 if (rhs.IsConstant()) {
1692 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1693 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1694 } else if (rhs.IsDoubleStackSlot()) {
1695 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1696 } else {
1697 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1698 }
1699 GenerateFPJumps(cond, &true_label, &false_label);
1700 break;
1701 }
1702 }
1703
1704 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001705 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001706
Roland Levillain4fa13f62015-07-06 18:11:54 +01001707 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001708 __ Bind(&false_label);
1709 __ xorl(reg, reg);
1710 __ jmp(&done_label);
1711
Roland Levillain4fa13f62015-07-06 18:11:54 +01001712 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001713 __ Bind(&true_label);
1714 __ movl(reg, Immediate(1));
1715 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001716}
1717
1718void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001719 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001720}
1721
1722void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001723 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001724}
1725
1726void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001727 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001728}
1729
1730void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001731 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001732}
1733
1734void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001735 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001736}
1737
1738void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001739 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001740}
1741
1742void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001743 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001744}
1745
1746void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001747 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001748}
1749
1750void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001751 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001752}
1753
1754void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001755 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001756}
1757
1758void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001759 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001760}
1761
1762void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001763 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001764}
1765
Aart Bike9f37602015-10-09 11:15:55 -07001766void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001767 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001768}
1769
1770void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001771 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001772}
1773
1774void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001775 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001776}
1777
1778void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001779 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001780}
1781
1782void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001783 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001784}
1785
1786void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001787 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001788}
1789
1790void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001791 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001792}
1793
1794void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001795 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001796}
1797
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001798void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001799 LocationSummary* locations =
1800 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001801 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001802 case Primitive::kPrimBoolean:
1803 case Primitive::kPrimByte:
1804 case Primitive::kPrimShort:
1805 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001806 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001807 case Primitive::kPrimLong: {
1808 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001809 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001810 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1811 break;
1812 }
1813 case Primitive::kPrimFloat:
1814 case Primitive::kPrimDouble: {
1815 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001816 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001817 locations->SetOut(Location::RequiresRegister());
1818 break;
1819 }
1820 default:
1821 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1822 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001823}
1824
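// HCompare produces -1, 0 or 1 in the output register. An unordered FP result (NaN) is
// routed to the "greater" or "less" outcome according to the compare's gt bias, and
// kBelow is used for FP because ucomiss/ucomisd report "less than" via the carry flag.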
1825void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001826 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001827 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001828 Location left = locations->InAt(0);
1829 Location right = locations->InAt(1);
1830
Mark Mendell0c9497d2015-08-21 09:30:05 -04001831 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001832 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001833 Condition less_cond = kLess;
1834
Calin Juravleddb7df22014-11-25 20:56:51 +00001835 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001836 case Primitive::kPrimBoolean:
1837 case Primitive::kPrimByte:
1838 case Primitive::kPrimShort:
1839 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001840 case Primitive::kPrimInt: {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001841 codegen_->GenerateIntCompare(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08001842 break;
1843 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001844 case Primitive::kPrimLong: {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001845 codegen_->GenerateLongCompare(left, right);
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001846 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001847 }
1848 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001849 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1850 if (right.IsConstant()) {
1851 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1852 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1853 } else if (right.IsStackSlot()) {
1854 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1855 } else {
1856 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1857 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001858 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001859 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001860 break;
1861 }
1862 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001863 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1864 if (right.IsConstant()) {
1865 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1866 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1867 } else if (right.IsDoubleStackSlot()) {
1868 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1869 } else {
1870 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1871 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001872 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001873 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001874 break;
1875 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001876 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001877 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001878 }
Aart Bika19616e2016-02-01 18:57:58 -08001879
Calin Juravleddb7df22014-11-25 20:56:51 +00001880 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001881 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001882 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001883
Calin Juravle91debbc2014-11-26 19:01:09 +00001884 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001885 __ movl(out, Immediate(1));
1886 __ jmp(&done);
1887
1888 __ Bind(&less);
1889 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001890
1891 __ Bind(&done);
1892}
1893
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001894void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001895 LocationSummary* locations =
1896 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001897 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001898}
1899
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001900void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001901 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001902}
1903
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001904void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1905 LocationSummary* locations =
1906 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1907 locations->SetOut(Location::ConstantLocation(constant));
1908}
1909
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001910void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001911 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001912}
1913
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001914void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001915 LocationSummary* locations =
1916 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001917 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001918}
1919
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001920void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001921 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001922}
1923
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001924void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
1925 LocationSummary* locations =
1926 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1927 locations->SetOut(Location::ConstantLocation(constant));
1928}
1929
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001930void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001931 // Will be generated at use site.
1932}
1933
1934void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
1935 LocationSummary* locations =
1936 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1937 locations->SetOut(Location::ConstantLocation(constant));
1938}
1939
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001940void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
1941 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001942 // Will be generated at use site.
1943}
1944
Calin Juravle27df7582015-04-17 19:12:31 +01001945void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
1946 memory_barrier->SetLocations(nullptr);
1947}
1948
1949void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001950 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01001951}
1952
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001953void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
1954 ret->SetLocations(nullptr);
1955}
1956
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001957void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001958 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001959}
1960
1961void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001962 LocationSummary* locations =
1963 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001964 switch (ret->InputAt(0)->GetType()) {
1965 case Primitive::kPrimBoolean:
1966 case Primitive::kPrimByte:
1967 case Primitive::kPrimChar:
1968 case Primitive::kPrimShort:
1969 case Primitive::kPrimInt:
1970 case Primitive::kPrimNot:
1971 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001972 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001973 break;
1974
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001975 case Primitive::kPrimFloat:
1976 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04001977 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001978 break;
1979
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001980 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001981 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001982 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001983}
1984
1985void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
1986 if (kIsDebugBuild) {
1987 switch (ret->InputAt(0)->GetType()) {
1988 case Primitive::kPrimBoolean:
1989 case Primitive::kPrimByte:
1990 case Primitive::kPrimChar:
1991 case Primitive::kPrimShort:
1992 case Primitive::kPrimInt:
1993 case Primitive::kPrimNot:
1994 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00001995 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001996 break;
1997
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001998 case Primitive::kPrimFloat:
1999 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002000 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002001 XMM0);
2002 break;
2003
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002004 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002005 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002006 }
2007 }
2008 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002009}
2010
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002011Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2012 switch (type) {
2013 case Primitive::kPrimBoolean:
2014 case Primitive::kPrimByte:
2015 case Primitive::kPrimChar:
2016 case Primitive::kPrimShort:
2017 case Primitive::kPrimInt:
2018 case Primitive::kPrimNot:
2019 case Primitive::kPrimLong:
2020 return Location::RegisterLocation(RAX);
2021
2022 case Primitive::kPrimVoid:
2023 return Location::NoLocation();
2024
2025 case Primitive::kPrimDouble:
2026 case Primitive::kPrimFloat:
2027 return Location::FpuRegisterLocation(XMM0);
2028 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002029
2030 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002031}
2032
2033Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2034 return Location::RegisterLocation(kMethodRegisterArgument);
2035}
2036
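// Walks the dex calling convention: integer/reference and FP arguments are counted
// independently (gp_index_ / float_index_); once registers are exhausted, arguments go
// to stack slots, with 64-bit types taking two slots. For example, (int, long, float)
// uses the first two integer registers and the first FP register.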
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002037Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002038 switch (type) {
2039 case Primitive::kPrimBoolean:
2040 case Primitive::kPrimByte:
2041 case Primitive::kPrimChar:
2042 case Primitive::kPrimShort:
2043 case Primitive::kPrimInt:
2044 case Primitive::kPrimNot: {
2045 uint32_t index = gp_index_++;
2046 stack_index_++;
2047 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002048 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002049 } else {
2050 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2051 }
2052 }
2053
2054 case Primitive::kPrimLong: {
2055 uint32_t index = gp_index_;
2056 stack_index_ += 2;
2057 if (index < calling_convention.GetNumberOfRegisters()) {
2058 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002059 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002060 } else {
2061 gp_index_ += 2;
2062 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2063 }
2064 }
2065
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002066 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002067 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002068 stack_index_++;
2069 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002070 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002071 } else {
2072 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2073 }
2074 }
2075
2076 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002077 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002078 stack_index_ += 2;
2079 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002080 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002081 } else {
2082 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2083 }
2084 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002085
2086 case Primitive::kPrimVoid:
2087 LOG(FATAL) << "Unexpected parameter type " << type;
2088 break;
2089 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002090 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002091}
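
// Illustrative note (the concrete register list comes from the
// InvokeDexCallingConvention definition, not from this visitor): for a
// signature such as (int, long, float, double), the int takes the first free
// GP argument register, the long the next one, and the float and double take
// the first two FP argument registers, while stack_index_ advances by
// 1 + 2 + 1 + 2 vreg slots so that any argument that overflows the register
// set is placed in the correct caller stack slot, computed from the slot just
// past that argument's vregs.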

void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as the dex calling
  // convention, except instead of loading arg0/r0 with the target Method*,
  // arg0/r0 will contain the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}
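
// How the intrinsic path fits together: the locations builders let
// IntrinsicLocationsBuilderX86_64 try to claim an invoke first; when that
// succeeds, the LocationSummary is marked as intrinsified and the matching
// code-generation visitor calls TryGenerateIntrinsicCode(), which dispatches
// to IntrinsicCodeGeneratorX86_64 and skips the regular call path entirely.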

void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}

void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. It is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (though the
  // concurrent copying collector may not do so in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
          Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
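
// In summary, the interface dispatch emitted above looks roughly like
// (all offsets resolved at compile time):
//   movl  temp, [receiver + class_offset]        // load the receiver's Class*
//   movq  temp, [temp + imt_ptr_offset]          // load the class's ImTable*
//   movq  temp, [temp + imt_entry_offset]        // load the ArtMethod* slot
//   call  [temp + quick_entry_point_offset]      // jump into compiled code
// with RAX already holding the dex method index as the hidden argument.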

void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimFloat: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
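
// Worked example of the sign-bit trick used above (IEEE-754 single
// precision): 1.5f is encoded as 0x3FC00000; XOR-ing it with the 0x80000000
// mask gives 0xBFC00000, which is exactly -1.5f. Only the sign bit changes,
// so NaNs, infinities and signed zeroes are negated correctly as well, which
// is why negation is an xorps/xorpd rather than a subtraction from zero
// (0.0 - 0.0 would yield +0.0 instead of the required -0.0).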

void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The Java language does not allow treating boolean as an integral type but
  // our bit representation makes it safe.

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          // TODO: We would benefit from a (to-be-implemented)
          // Location::RegisterOrStackSlot requirement for this input.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
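
// Summary of the location choices above: the integral narrowing conversions
// and long-to-int accept their input anywhere (register, stack slot or
// constant) and only need a fresh output register, while conversions from
// float/double to int/long keep the input in an XMM register because the
// bound check and truncation emitted below need it there. Conversions that
// produce a float/double accept Any() since cvtsi2ss/cvtsi2sd and
// cvtss2sd/cvtsd2ss can take a memory operand, and constant inputs are
// simply materialized as converted literals.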

void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          if (in.IsRegister()) {
            __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
            __ movsxb(out.AsRegister<CpuRegister>(),
                      Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            __ movl(out.AsRegister<CpuRegister>(),
                    Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          if (in.IsRegister()) {
            __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
            __ movsxw(out.AsRegister<CpuRegister>(),
                      Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            __ movl(out.AsRegister<CpuRegister>(),
                    Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          if (in.IsRegister()) {
            __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsDoubleStackSlot()) {
            __ movl(out.AsRegister<CpuRegister>(),
                    Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          __ movl(output, Immediate(kPrimIntMax));
          // if input >= (float)INT_MAX goto done
          __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = float-to-int-truncate(input)
          __ cvttss2si(output, input, false);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          __ movl(output, Immediate(kPrimIntMax));
          // if input >= (double)INT_MAX goto done
          __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = double-to-int-truncate(input)
          __ cvttsd2si(output, input);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(in.IsRegister());
          DCHECK(out.IsRegister());
          __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          codegen_->Load64BitValue(output, kPrimLongMax);
          // if input >= (float)LONG_MAX goto done
          __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = float-to-long-truncate(input)
          __ cvttss2si(output, input, true);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          codegen_->Load64BitValue(output, kPrimLongMax);
          // if input >= (double)LONG_MAX goto done
          __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = double-to-long-truncate(input)
          __ cvttsd2si(output, input, true);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          if (in.IsRegister()) {
            __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
            __ movzxw(out.AsRegister<CpuRegister>(),
                      Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            __ movl(out.AsRegister<CpuRegister>(),
                    Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          if (in.IsRegister()) {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
          } else if (in.IsConstant()) {
            int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load32BitValue(dest, static_cast<float>(v));
          } else {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), false);
          }
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          if (in.IsRegister()) {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
          } else if (in.IsConstant()) {
            int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load32BitValue(dest, static_cast<float>(v));
          } else {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), true);
          }
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          if (in.IsFpuRegister()) {
            __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
          } else if (in.IsConstant()) {
            double v = in.GetConstant()->AsDoubleConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load32BitValue(dest, static_cast<float>(v));
          } else {
            __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          if (in.IsRegister()) {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
          } else if (in.IsConstant()) {
            int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load64BitValue(dest, static_cast<double>(v));
          } else {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), false);
          }
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          if (in.IsRegister()) {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
          } else if (in.IsConstant()) {
            int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load64BitValue(dest, static_cast<double>(v));
          } else {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), true);
          }
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          if (in.IsFpuRegister()) {
            __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
          } else if (in.IsConstant()) {
            float v = in.GetConstant()->AsFloatConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load64BitValue(dest, static_cast<double>(v));
          } else {
            __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
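
// Note on the fp-to-int and fp-to-long sequences above: they implement the
// Java narrowing rules. A NaN input takes the kUnordered branch and produces
// 0; an input at or above the maximum takes the kAboveEqual branch and keeps
// the preloaded kPrimIntMax/kPrimLongMax; everything else is truncated toward
// zero by cvttss2si/cvttsd2si. Inputs below the minimum rely on the x86
// definition of the truncating conversion, which returns the "integer
// indefinite" value 0x80000000 / 0x8000000000000000 on overflow, i.e.
// exactly INT_MIN / LONG_MIN, as the Java semantics require.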

void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // We can use a leaq or addq if the constant can fit in an immediate.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
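
// The long case above deliberately uses RegisterOrInt32Constant rather than
// RegisterOrConstant: addq and leaq only encode sign-extended 32-bit
// immediates, so a 64-bit constant that does not fit must be materialized in
// a register first. The code generation below depends on this via
// DCHECK_EQ(int32_value, value).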

void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
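
// Why the three-operand forms above: addl/addq overwrite their destination,
// so they are only used when the output register happens to coincide with
// one of the inputs. When the register allocator picks a third register, the
// sum is formed with
//   leal out, [first + second * 1]   or   leal out, [first + imm]
// which leaves both inputs intact (and, unlike add, does not modify the
// flags).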

void LocationsBuilderX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}

void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
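
// Unlike VisitAdd above, subtraction has no lea-style three-operand form, so
// the output is pinned to the first input (SameAsFirstInput) and every path
// uses the two-operand subl/subq/subss/subsd, with the long immediate again
// restricted to a sign-extended 32-bit value (hence DCHECK(IsInt<32>(value))).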

void LocationsBuilderX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      if (mul->InputAt(1)->IsIntConstant()) {
        // Can use 3 operand multiply.
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      } else {
        locations->SetOut(Location::SameAsFirstInput());
      }
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      if (mul->InputAt(1)->IsLongConstant() &&
          IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
        // Can use 3 operand multiply.
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      } else {
        locations->SetOut(Location::SameAsFirstInput());
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
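
// Background for the "3 operand multiply" comments above: x86-64 provides
// imul dst, src, imm32, which lets the product land in a register different
// from either input when the multiplier is a constant that fits in 32 bits.
// For all other operand kinds only the two-operand imul dst, src exists, so
// the output must reuse the first input; that is why SetOut switches between
// kNoOutputOverlap and SameAsFirstInput.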

void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
3138 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3139 } else if (second.IsConstant()) {
3140 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003141 codegen_->LiteralDoubleAddress(
3142 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003143 } else {
3144 DCHECK(second.IsDoubleStackSlot());
3145 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3146 Address(CpuRegister(RSP), second.GetStackIndex()));
3147 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003148 break;
3149 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003150
3151 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003152 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003153 }
3154}
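// Illustrative sketch (not part of the compiler's output): what the multiplies
// above compute. Java int/long multiplication simply wraps on overflow, which
// is exactly what imull/imulq produce, so no overflow check is emitted. For the
// long case, the three-operand `imulq reg, reg, imm32` form sign-extends its
// 32-bit immediate, which is why only constants passing IsInt<32> use the
// immediate encoding and larger ones go through the constant area. Unsigned
// arithmetic below only avoids C++ signed-overflow UB; the wrapped result is
// the same.
#include <cstdint>

static int32_t java_mul_int(int32_t a, int32_t b) {
  return static_cast<int32_t>(static_cast<uint32_t>(a) * static_cast<uint32_t>(b));
}

static int64_t java_mul_long(int64_t a, int64_t b) {
  return static_cast<int64_t>(static_cast<uint64_t>(a) * static_cast<uint64_t>(b));
}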
3155
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003156void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3157 uint32_t stack_adjustment, bool is_float) {
3158 if (source.IsStackSlot()) {
3159 DCHECK(is_float);
3160 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3161 } else if (source.IsDoubleStackSlot()) {
3162 DCHECK(!is_float);
3163 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3164 } else {
3165 // Write the value to the temporary location on the stack and load to FP stack.
3166 if (is_float) {
3167 Location stack_temp = Location::StackSlot(temp_offset);
3168 codegen_->Move(stack_temp, source);
3169 __ flds(Address(CpuRegister(RSP), temp_offset));
3170 } else {
3171 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3172 codegen_->Move(stack_temp, source);
3173 __ fldl(Address(CpuRegister(RSP), temp_offset));
3174 }
3175 }
3176}
3177
3178void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3179 Primitive::Type type = rem->GetResultType();
3180 bool is_float = type == Primitive::kPrimFloat;
3181 size_t elem_size = Primitive::ComponentSize(type);
3182 LocationSummary* locations = rem->GetLocations();
3183 Location first = locations->InAt(0);
3184 Location second = locations->InAt(1);
3185 Location out = locations->Out();
3186
3187 // Create stack space for 2 elements.
3188 // TODO: enhance register allocator to ask for stack temporaries.
3189 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3190
3191 // Load the values to the FP stack in reverse order, using temporaries if needed.
3192 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3193 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3194
3195 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003196 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003197 __ Bind(&retry);
3198 __ fprem();
3199
3200 // Move FP status to AX.
3201 __ fstsw();
3202
3203 // And see if the argument reduction is complete. This is signaled by the
3204 // C2 FPU status flag being cleared to 0.
3205 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3206 __ j(kNotEqual, &retry);
3207
3208 // We have settled on the final value. Retrieve it into an XMM register.
3209 // Store FP top of stack to real stack.
3210 if (is_float) {
3211 __ fsts(Address(CpuRegister(RSP), 0));
3212 } else {
3213 __ fstl(Address(CpuRegister(RSP), 0));
3214 }
3215
3216 // Pop the 2 items from the FP stack.
3217 __ fucompp();
3218
3219 // Load the value from the stack into an XMM register.
3220 DCHECK(out.IsFpuRegister()) << out;
3221 if (is_float) {
3222 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3223 } else {
3224 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3225 }
3226
3227 // And remove the temporary stack space we allocated.
3228 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3229}
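// Illustrative sketch (not part of the compiler's output): what the x87 fprem
// loop above computes. fprem performs only partial argument reduction, so it is
// repeated until the C2 status flag clears; the final value is the remainder
// with the quotient truncated toward zero, i.e. the same result as C/C++ fmod
// and Java's % on float/double.
#include <cmath>

static float float_rem(float dividend, float divisor) {
  return std::fmod(dividend, divisor);  // float overload
}

static double double_rem(double dividend, double divisor) {
  return std::fmod(dividend, divisor);
}

// Example: float_rem(5.5f, 2.0f) == 1.5f and float_rem(-5.5f, 2.0f) == -1.5f;
// the sign of the result follows the dividend, as fprem guarantees.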
3230
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003231void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3232 DCHECK(instruction->IsDiv() || instruction->IsRem());
3233
3234 LocationSummary* locations = instruction->GetLocations();
3235 Location second = locations->InAt(1);
3236 DCHECK(second.IsConstant());
3237
3238 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3239 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003240 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003241
3242 DCHECK(imm == 1 || imm == -1);
3243
3244 switch (instruction->GetResultType()) {
3245 case Primitive::kPrimInt: {
3246 if (instruction->IsRem()) {
3247 __ xorl(output_register, output_register);
3248 } else {
3249 __ movl(output_register, input_register);
3250 if (imm == -1) {
3251 __ negl(output_register);
3252 }
3253 }
3254 break;
3255 }
3256
3257 case Primitive::kPrimLong: {
3258 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003259 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003260 } else {
3261 __ movq(output_register, input_register);
3262 if (imm == -1) {
3263 __ negq(output_register);
3264 }
3265 }
3266 break;
3267 }
3268
3269 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003270 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003271 }
3272}
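// Illustrative sketch (not part of the compiler's output): the special cases
// handled above. x % 1 and x % -1 are always 0, x / 1 is x, and x / -1 is the
// two's-complement negation of x; for INT_MIN the negation wraps back to
// INT_MIN, which is exactly the result the Java language requires, so a plain
// neg is sufficient.
#include <cstdint>

static int32_t int_div_by_plus_minus_one(int32_t x, int32_t divisor) {
  // Assumes divisor is +1 or -1. Unsigned negation avoids C++ overflow UB
  // while wrapping the same way negl does.
  return divisor == 1 ? x : static_cast<int32_t>(0u - static_cast<uint32_t>(x));
}

static int32_t int_rem_by_plus_minus_one(int32_t /* x */) {
  return 0;
}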
3273
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003274void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003275 LocationSummary* locations = instruction->GetLocations();
3276 Location second = locations->InAt(1);
3277
3278 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3279 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3280
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003281 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003282 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3283 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003284
3285 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3286
3287 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003288 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003289 __ testl(numerator, numerator);
3290 __ cmov(kGreaterEqual, tmp, numerator);
3291 int shift = CTZ(imm);
3292 __ sarl(tmp, Immediate(shift));
3293
3294 if (imm < 0) {
3295 __ negl(tmp);
3296 }
3297
3298 __ movl(output_register, tmp);
3299 } else {
3300 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3301 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3302
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003303 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003304 __ addq(rdx, numerator);
3305 __ testq(numerator, numerator);
3306 __ cmov(kGreaterEqual, rdx, numerator);
3307 int shift = CTZ(imm);
3308 __ sarq(rdx, Immediate(shift));
3309
3310 if (imm < 0) {
3311 __ negq(rdx);
3312 }
3313
3314 __ movq(output_register, rdx);
3315 }
3316}
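// Illustrative sketch (not part of the compiler's output): the bias-and-shift
// sequence above, in portable C++. Truncating division by 2^k needs |d|-1 added
// to negative dividends before the arithmetic shift; the cmov selects the
// biased value only when the numerator is negative, and the final negation
// handles negative divisors.
#include <cstdint>

static int32_t int_div_by_power_of_two(int32_t x, int32_t divisor) {
  // Assumes divisor == +/-2^k with k >= 1 (the +/-1 cases are handled earlier).
  uint32_t abs_d = divisor < 0 ? 0u - static_cast<uint32_t>(divisor)
                               : static_cast<uint32_t>(divisor);
  int shift = 0;
  while (((abs_d >> shift) & 1u) == 0u) {
    ++shift;  // shift = CTZ(|divisor|)
  }
  int64_t biased = x < 0 ? static_cast<int64_t>(x) + abs_d - 1 : x;
  int32_t quotient = static_cast<int32_t>(biased >> shift);  // arithmetic shift, like sarl
  return divisor < 0 ? static_cast<int32_t>(0u - static_cast<uint32_t>(quotient))
                     : quotient;
}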
3317
3318void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3319 DCHECK(instruction->IsDiv() || instruction->IsRem());
3320
3321 LocationSummary* locations = instruction->GetLocations();
3322 Location second = locations->InAt(1);
3323
3324 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3325 : locations->GetTemp(0).AsRegister<CpuRegister>();
3326 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3327 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3328 : locations->Out().AsRegister<CpuRegister>();
3329 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3330
3331 DCHECK_EQ(RAX, eax.AsRegister());
3332 DCHECK_EQ(RDX, edx.AsRegister());
3333 if (instruction->IsDiv()) {
3334 DCHECK_EQ(RAX, out.AsRegister());
3335 } else {
3336 DCHECK_EQ(RDX, out.AsRegister());
3337 }
3338
3339 int64_t magic;
3340 int shift;
3341
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003342 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003343 if (instruction->GetResultType() == Primitive::kPrimInt) {
3344 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3345
3346 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3347
3348 __ movl(numerator, eax);
3349
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003350 __ movl(eax, Immediate(magic));
3351 __ imull(numerator);
3352
3353 if (imm > 0 && magic < 0) {
3354 __ addl(edx, numerator);
3355 } else if (imm < 0 && magic > 0) {
3356 __ subl(edx, numerator);
3357 }
3358
3359 if (shift != 0) {
3360 __ sarl(edx, Immediate(shift));
3361 }
3362
3363 __ movl(eax, edx);
3364 __ shrl(edx, Immediate(31));
3365 __ addl(edx, eax);
3366
3367 if (instruction->IsRem()) {
3368 __ movl(eax, numerator);
3369 __ imull(edx, Immediate(imm));
3370 __ subl(eax, edx);
3371 __ movl(edx, eax);
3372 } else {
3373 __ movl(eax, edx);
3374 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003375 } else {
3376 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3377
3378 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3379
3380 CpuRegister rax = eax;
3381 CpuRegister rdx = edx;
3382
3383 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3384
3385 // Save the numerator.
3386 __ movq(numerator, rax);
3387
3388 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003389 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003390
3391 // RDX:RAX = magic * numerator
3392 __ imulq(numerator);
3393
3394 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003395 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003396 __ addq(rdx, numerator);
3397 } else if (imm < 0 && magic > 0) {
3398 // RDX -= numerator
3399 __ subq(rdx, numerator);
3400 }
3401
3402 // Shift if needed.
3403 if (shift != 0) {
3404 __ sarq(rdx, Immediate(shift));
3405 }
3406
3407 // RDX += 1 if RDX < 0
3408 __ movq(rax, rdx);
3409 __ shrq(rdx, Immediate(63));
3410 __ addq(rdx, rax);
3411
3412 if (instruction->IsRem()) {
3413 __ movq(rax, numerator);
3414
3415 if (IsInt<32>(imm)) {
3416 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3417 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003418 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003419 }
3420
3421 __ subq(rax, rdx);
3422 __ movq(rdx, rax);
3423 } else {
3424 __ movq(rax, rdx);
3425 }
3426 }
3427}
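// Illustrative sketch (not part of the compiler's output): the magic-number
// division emitted above, shown for the 32-bit case and mirroring the
// instruction sequence step by step. `magic` and `shift` are whatever
// CalculateMagicAndShiftForDivRem produces for the divisor `d`; the
// intermediate sums provably stay within int32 range, so plain 32-bit
// arithmetic matches the addl/subl/sarl instructions.
#include <cstdint>

static int32_t int_div_by_constant(int32_t n, int32_t d, int32_t magic, int shift) {
  int64_t product = static_cast<int64_t>(magic) * static_cast<int64_t>(n);
  int32_t hi = static_cast<int32_t>(product >> 32);       // high half left in EDX by imull
  if (d > 0 && magic < 0) {
    hi += n;                                               // addl(edx, numerator)
  } else if (d < 0 && magic > 0) {
    hi -= n;                                               // subl(edx, numerator)
  }
  hi >>= shift;                                            // sarl(edx, shift)
  return hi + static_cast<int32_t>(static_cast<uint32_t>(hi) >> 31);  // +1 if negative
}

static int32_t int_rem_by_constant(int32_t n, int32_t d, int32_t magic, int shift) {
  return n - int_div_by_constant(n, d, magic, shift) * d;  // imull + subl tail above
}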
3428
Calin Juravlebacfec32014-11-14 15:54:36 +00003429void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3430 DCHECK(instruction->IsDiv() || instruction->IsRem());
3431 Primitive::Type type = instruction->GetResultType();
3432 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3433
3434 bool is_div = instruction->IsDiv();
3435 LocationSummary* locations = instruction->GetLocations();
3436
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003437 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3438 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003439
Roland Levillain271ab9c2014-11-27 15:23:57 +00003440 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003441 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003442
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003443 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003444 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003445
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003446 if (imm == 0) {
3447 // Do not generate anything. DivZeroCheck prevents any code from being executed.
3448 } else if (imm == 1 || imm == -1) {
3449 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003450 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003451 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003452 } else {
3453 DCHECK(imm <= -2 || imm >= 2);
3454 GenerateDivRemWithAnyConstant(instruction);
3455 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003456 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003457 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003458 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003459 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003460 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003461
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003462 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3463 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3464 // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000)
3465 // so it's safe to just use negl instead of more complex comparisons.
3466 if (type == Primitive::kPrimInt) {
3467 __ cmpl(second_reg, Immediate(-1));
3468 __ j(kEqual, slow_path->GetEntryLabel());
3469 // edx:eax <- sign-extended of eax
3470 __ cdq();
3471 // eax = quotient, edx = remainder
3472 __ idivl(second_reg);
3473 } else {
3474 __ cmpq(second_reg, Immediate(-1));
3475 __ j(kEqual, slow_path->GetEntryLabel());
3476 // rdx:rax <- sign-extended of rax
3477 __ cqo();
3478 // rax = quotient, rdx = remainder
3479 __ idivq(second_reg);
3480 }
3481 __ Bind(slow_path->GetExitLabel());
3482 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003483}
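// Illustrative sketch (not part of the compiler's output): the corner case the
// -1 comparison above diverts to the slow path. x86-64 idiv raises a #DE fault
// when the quotient does not fit, which happens exactly for INT_MIN / -1 (and
// LONG_MIN / -1 for idivq), whereas Java requires the quotient to wrap back to
// the dividend and the remainder to be 0.
#include <cstdint>

static int32_t java_int_div(int32_t dividend, int32_t divisor) {
  // The divisor is known to be non-zero here (HDivZeroCheck ran earlier).
  if (divisor == -1) {
    // Wrapping negation: INT_MIN stays INT_MIN instead of faulting in idiv.
    return static_cast<int32_t>(0u - static_cast<uint32_t>(dividend));
  }
  return dividend / divisor;  // cannot overflow, safe to lower to idivl
}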
3484
Calin Juravle7c4954d2014-10-28 16:57:40 +00003485void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3486 LocationSummary* locations =
3487 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3488 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003489 case Primitive::kPrimInt:
3490 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003491 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003492 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003493 locations->SetOut(Location::SameAsFirstInput());
3494 // Intel uses edx:eax as the dividend.
3495 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003496 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3497 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3498 // output and request another temp.
3499 if (div->InputAt(1)->IsConstant()) {
3500 locations->AddTemp(Location::RequiresRegister());
3501 }
Calin Juravled0d48522014-11-04 16:40:20 +00003502 break;
3503 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003504
Calin Juravle7c4954d2014-10-28 16:57:40 +00003505 case Primitive::kPrimFloat:
3506 case Primitive::kPrimDouble: {
3507 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003508 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003509 locations->SetOut(Location::SameAsFirstInput());
3510 break;
3511 }
3512
3513 default:
3514 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3515 }
3516}
3517
3518void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3519 LocationSummary* locations = div->GetLocations();
3520 Location first = locations->InAt(0);
3521 Location second = locations->InAt(1);
3522 DCHECK(first.Equals(locations->Out()));
3523
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003524 Primitive::Type type = div->GetResultType();
3525 switch (type) {
3526 case Primitive::kPrimInt:
3527 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003528 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003529 break;
3530 }
3531
Calin Juravle7c4954d2014-10-28 16:57:40 +00003532 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003533 if (second.IsFpuRegister()) {
3534 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3535 } else if (second.IsConstant()) {
3536 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003537 codegen_->LiteralFloatAddress(
3538 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003539 } else {
3540 DCHECK(second.IsStackSlot());
3541 __ divss(first.AsFpuRegister<XmmRegister>(),
3542 Address(CpuRegister(RSP), second.GetStackIndex()));
3543 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003544 break;
3545 }
3546
3547 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003548 if (second.IsFpuRegister()) {
3549 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3550 } else if (second.IsConstant()) {
3551 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003552 codegen_->LiteralDoubleAddress(
3553 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003554 } else {
3555 DCHECK(second.IsDoubleStackSlot());
3556 __ divsd(first.AsFpuRegister<XmmRegister>(),
3557 Address(CpuRegister(RSP), second.GetStackIndex()));
3558 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003559 break;
3560 }
3561
3562 default:
3563 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3564 }
3565}
3566
Calin Juravlebacfec32014-11-14 15:54:36 +00003567void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003568 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003569 LocationSummary* locations =
3570 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003571
3572 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003573 case Primitive::kPrimInt:
3574 case Primitive::kPrimLong: {
3575 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003576 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003577 // Intel uses rdx:rax as the dividend and puts the remainder in rdx.
3578 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003579 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3580 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3581 // output and request another temp.
3582 if (rem->InputAt(1)->IsConstant()) {
3583 locations->AddTemp(Location::RequiresRegister());
3584 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003585 break;
3586 }
3587
3588 case Primitive::kPrimFloat:
3589 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003590 locations->SetInAt(0, Location::Any());
3591 locations->SetInAt(1, Location::Any());
3592 locations->SetOut(Location::RequiresFpuRegister());
3593 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003594 break;
3595 }
3596
3597 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003598 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003599 }
3600}
3601
3602void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3603 Primitive::Type type = rem->GetResultType();
3604 switch (type) {
3605 case Primitive::kPrimInt:
3606 case Primitive::kPrimLong: {
3607 GenerateDivRemIntegral(rem);
3608 break;
3609 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003610 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003611 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003612 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003613 break;
3614 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003615 default:
3616 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3617 }
3618}
3619
Calin Juravled0d48522014-11-04 16:40:20 +00003620void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003621 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3622 ? LocationSummary::kCallOnSlowPath
3623 : LocationSummary::kNoCall;
3624 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003625 locations->SetInAt(0, Location::Any());
3626 if (instruction->HasUses()) {
3627 locations->SetOut(Location::SameAsFirstInput());
3628 }
3629}
3630
3631void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003632 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003633 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3634 codegen_->AddSlowPath(slow_path);
3635
3636 LocationSummary* locations = instruction->GetLocations();
3637 Location value = locations->InAt(0);
3638
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003639 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003640 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003641 case Primitive::kPrimByte:
3642 case Primitive::kPrimChar:
3643 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003644 case Primitive::kPrimInt: {
3645 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003646 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003647 __ j(kEqual, slow_path->GetEntryLabel());
3648 } else if (value.IsStackSlot()) {
3649 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3650 __ j(kEqual, slow_path->GetEntryLabel());
3651 } else {
3652 DCHECK(value.IsConstant()) << value;
3653 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01003654 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003655 }
3656 }
3657 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003658 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003659 case Primitive::kPrimLong: {
3660 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003661 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003662 __ j(kEqual, slow_path->GetEntryLabel());
3663 } else if (value.IsDoubleStackSlot()) {
3664 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3665 __ j(kEqual, slow_path->GetEntryLabel());
3666 } else {
3667 DCHECK(value.IsConstant()) << value;
3668 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01003669 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003670 }
3671 }
3672 break;
3673 }
3674 default:
3675 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003676 }
Calin Juravled0d48522014-11-04 16:40:20 +00003677}
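// Illustrative sketch (not part of the compiler's output): the semantics the
// zero check above enforces before any integer division or remainder runs.
// std::domain_error merely stands in for the ArithmeticException raised by the
// runtime's slow path.
#include <cstdint>
#include <stdexcept>

static int32_t checked_int_div(int32_t dividend, int32_t divisor) {
  if (divisor == 0) {
    throw std::domain_error("divide by zero");
  }
  if (divisor == -1) {
    return static_cast<int32_t>(0u - static_cast<uint32_t>(dividend));  // wraps INT_MIN safely
  }
  return dividend / divisor;
}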
3678
Calin Juravle9aec02f2014-11-18 23:06:35 +00003679void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3680 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3681
3682 LocationSummary* locations =
3683 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3684
3685 switch (op->GetResultType()) {
3686 case Primitive::kPrimInt:
3687 case Primitive::kPrimLong: {
3688 locations->SetInAt(0, Location::RequiresRegister());
3689 // The shift count needs to be in CL.
3690 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3691 locations->SetOut(Location::SameAsFirstInput());
3692 break;
3693 }
3694 default:
3695 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3696 }
3697}
3698
3699void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3700 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3701
3702 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003703 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003704 Location second = locations->InAt(1);
3705
3706 switch (op->GetResultType()) {
3707 case Primitive::kPrimInt: {
3708 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003709 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003710 if (op->IsShl()) {
3711 __ shll(first_reg, second_reg);
3712 } else if (op->IsShr()) {
3713 __ sarl(first_reg, second_reg);
3714 } else {
3715 __ shrl(first_reg, second_reg);
3716 }
3717 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003718 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003719 if (op->IsShl()) {
3720 __ shll(first_reg, imm);
3721 } else if (op->IsShr()) {
3722 __ sarl(first_reg, imm);
3723 } else {
3724 __ shrl(first_reg, imm);
3725 }
3726 }
3727 break;
3728 }
3729 case Primitive::kPrimLong: {
3730 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003731 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003732 if (op->IsShl()) {
3733 __ shlq(first_reg, second_reg);
3734 } else if (op->IsShr()) {
3735 __ sarq(first_reg, second_reg);
3736 } else {
3737 __ shrq(first_reg, second_reg);
3738 }
3739 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003740 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003741 if (op->IsShl()) {
3742 __ shlq(first_reg, imm);
3743 } else if (op->IsShr()) {
3744 __ sarq(first_reg, imm);
3745 } else {
3746 __ shrq(first_reg, imm);
3747 }
3748 }
3749 break;
3750 }
3751 default:
3752 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003753 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003754 }
3755}
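// Illustrative sketch (not part of the compiler's output): why the constant
// shift counts above are masked. Java only uses the low 5 bits of the distance
// for int shifts and the low 6 bits for long shifts, which is also what the
// x86 shift instructions do with a count in CL, so masking with
// kMaxIntShiftDistance (31) / kMaxLongShiftDistance (63) reproduces both.
#include <cstdint>

static int32_t java_shl_int(int32_t value, int32_t distance) {
  uint32_t masked = static_cast<uint32_t>(distance) & 31u;
  return static_cast<int32_t>(static_cast<uint32_t>(value) << masked);
}

static int64_t java_shl_long(int64_t value, int32_t distance) {
  uint32_t masked = static_cast<uint32_t>(distance) & 63u;
  return static_cast<int64_t>(static_cast<uint64_t>(value) << masked);
}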
3756
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003757void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3758 LocationSummary* locations =
3759 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3760
3761 switch (ror->GetResultType()) {
3762 case Primitive::kPrimInt:
3763 case Primitive::kPrimLong: {
3764 locations->SetInAt(0, Location::RequiresRegister());
3765 // The shift count needs to be in CL (unless it is a constant).
3766 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3767 locations->SetOut(Location::SameAsFirstInput());
3768 break;
3769 }
3770 default:
3771 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3772 UNREACHABLE();
3773 }
3774}
3775
3776void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3777 LocationSummary* locations = ror->GetLocations();
3778 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3779 Location second = locations->InAt(1);
3780
3781 switch (ror->GetResultType()) {
3782 case Primitive::kPrimInt:
3783 if (second.IsRegister()) {
3784 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3785 __ rorl(first_reg, second_reg);
3786 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003787 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003788 __ rorl(first_reg, imm);
3789 }
3790 break;
3791 case Primitive::kPrimLong:
3792 if (second.IsRegister()) {
3793 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3794 __ rorq(first_reg, second_reg);
3795 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003796 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003797 __ rorq(first_reg, imm);
3798 }
3799 break;
3800 default:
3801 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3802 UNREACHABLE();
3803 }
3804}
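// Illustrative sketch (not part of the compiler's output): the rotation that
// rorl/rorq implement, in portable C++. The distance is taken modulo the
// operand width, matching both the hardware and Integer.rotateRight /
// Long.rotateRight.
#include <cstdint>

static uint32_t rotate_right_32(uint32_t value, uint32_t distance) {
  distance &= 31u;
  return distance == 0u ? value : (value >> distance) | (value << (32u - distance));
}

static uint64_t rotate_right_64(uint64_t value, uint32_t distance) {
  distance &= 63u;
  return distance == 0u ? value : (value >> distance) | (value << (64u - distance));
}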
3805
Calin Juravle9aec02f2014-11-18 23:06:35 +00003806void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3807 HandleShift(shl);
3808}
3809
3810void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3811 HandleShift(shl);
3812}
3813
3814void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3815 HandleShift(shr);
3816}
3817
3818void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3819 HandleShift(shr);
3820}
3821
3822void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3823 HandleShift(ushr);
3824}
3825
3826void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3827 HandleShift(ushr);
3828}
3829
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003830void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003831 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003832 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003833 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003834 if (instruction->IsStringAlloc()) {
3835 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3836 } else {
3837 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3838 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3839 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003840 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003841}
3842
3843void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003844 // Note: if heap poisoning is enabled, the entry point takes care
3845 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003846 if (instruction->IsStringAlloc()) {
3847 // String is allocated through StringFactory. Call NewEmptyString entry point.
3848 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
Andreas Gampe542451c2016-07-26 09:02:02 -07003849 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00003850 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3851 __ call(Address(temp, code_offset.SizeValue()));
3852 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3853 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003854 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
David Brazdil6de19382016-01-08 17:37:10 +00003855 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3856 DCHECK(!codegen_->IsLeafMethod());
3857 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003858}
3859
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003860void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3861 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003862 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003863 InvokeRuntimeCallingConvention calling_convention;
3864 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003865 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003866 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003867 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003868}
3869
3870void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3871 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003872 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3873 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003874 // Note: if heap poisoning is enabled, the entry point takes care
3875 // of poisoning the reference.
Serban Constantinescuba45db02016-07-12 22:53:02 +01003876 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003877 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003878
3879 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003880}
3881
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003882void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003883 LocationSummary* locations =
3884 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003885 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3886 if (location.IsStackSlot()) {
3887 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3888 } else if (location.IsDoubleStackSlot()) {
3889 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3890 }
3891 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003892}
3893
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003894void InstructionCodeGeneratorX86_64::VisitParameterValue(
3895 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003896 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003897}
3898
3899void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3900 LocationSummary* locations =
3901 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3902 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3903}
3904
3905void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
3906 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3907 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003908}
3909
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003910void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3911 LocationSummary* locations =
3912 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3913 locations->SetInAt(0, Location::RequiresRegister());
3914 locations->SetOut(Location::RequiresRegister());
3915}
3916
3917void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3918 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00003919 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01003920 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003921 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01003922 __ movq(locations->Out().AsRegister<CpuRegister>(),
3923 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003924 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01003925 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00003926 instruction->GetIndex(), kX86_64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003927 __ movq(locations->Out().AsRegister<CpuRegister>(),
3928 Address(locations->InAt(0).AsRegister<CpuRegister>(),
3929 mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01003930 __ movq(locations->Out().AsRegister<CpuRegister>(),
3931 Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003932 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003933}
3934
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003935void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003936 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003937 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003938 locations->SetInAt(0, Location::RequiresRegister());
3939 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003940}
3941
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003942void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
3943 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003944 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3945 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003946 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00003947 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003948 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003949 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003950 break;
3951
3952 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003953 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003954 break;
3955
3956 default:
3957 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
3958 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003959}
3960
David Brazdil66d126e2015-04-03 16:02:44 +01003961void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
3962 LocationSummary* locations =
3963 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
3964 locations->SetInAt(0, Location::RequiresRegister());
3965 locations->SetOut(Location::SameAsFirstInput());
3966}
3967
3968void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01003969 LocationSummary* locations = bool_not->GetLocations();
3970 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3971 locations->Out().AsRegister<CpuRegister>().AsRegister());
3972 Location out = locations->Out();
3973 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
3974}
3975
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003976void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003977 LocationSummary* locations =
3978 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01003979 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003980 locations->SetInAt(i, Location::Any());
3981 }
3982 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003983}
3984
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003985void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003986 LOG(FATAL) << "Unimplemented";
3987}
3988
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003989void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00003990 /*
3991 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003992 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00003993 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
3994 */
3995 switch (kind) {
3996 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00003997 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00003998 break;
3999 }
4000 case MemBarrierKind::kAnyStore:
4001 case MemBarrierKind::kLoadAny:
4002 case MemBarrierKind::kStoreStore: {
4003 // nop
4004 break;
4005 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004006 case MemBarrierKind::kNTStoreStore:
4007 // Non-Temporal Store/Store needs an explicit fence.
4008 MemoryFence(/* non-temporal */ true);
4009 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004010 }
4011}
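// Illustrative sketch (not part of the compiler's output): a rough C++11
// analogue of the barrier policy above. On x86-64 only the StoreLoad/AnyAny
// case needs an actual fence instruction; the remaining orderings are already
// provided by the hardware memory model and only need to stop the compiler
// from reordering. The non-temporal kNTStoreStore case (which does need a real
// fence) is omitted from this sketch.
#include <atomic>

enum class SketchBarrierKind { kAnyAny, kAnyStore, kLoadAny, kStoreStore };

static void memory_barrier(SketchBarrierKind kind) {
  if (kind == SketchBarrierKind::kAnyAny) {
    std::atomic_thread_fence(std::memory_order_seq_cst);  // lowers to mfence or a locked op
  } else {
    std::atomic_signal_fence(std::memory_order_seq_cst);  // compiler-only barrier
  }
}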
4012
4013void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4014 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4015
Roland Levillain0d5a2812015-11-13 10:07:31 +00004016 bool object_field_get_with_read_barrier =
4017 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004018 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004019 new (GetGraph()->GetArena()) LocationSummary(instruction,
4020 object_field_get_with_read_barrier ?
4021 LocationSummary::kCallOnSlowPath :
4022 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004023 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4024 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
4025 }
Calin Juravle52c48962014-12-16 17:02:57 +00004026 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004027 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4028 locations->SetOut(Location::RequiresFpuRegister());
4029 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004030 // The output overlaps for an object field get when read barriers
4031 // are enabled: we do not want the move to overwrite the object's
4032 // location, as we need it to emit the read barrier.
4033 locations->SetOut(
4034 Location::RequiresRegister(),
4035 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004036 }
Calin Juravle52c48962014-12-16 17:02:57 +00004037}
4038
4039void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4040 const FieldInfo& field_info) {
4041 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4042
4043 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004044 Location base_loc = locations->InAt(0);
4045 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004046 Location out = locations->Out();
4047 bool is_volatile = field_info.IsVolatile();
4048 Primitive::Type field_type = field_info.GetFieldType();
4049 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4050
4051 switch (field_type) {
4052 case Primitive::kPrimBoolean: {
4053 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4054 break;
4055 }
4056
4057 case Primitive::kPrimByte: {
4058 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4059 break;
4060 }
4061
4062 case Primitive::kPrimShort: {
4063 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4064 break;
4065 }
4066
4067 case Primitive::kPrimChar: {
4068 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4069 break;
4070 }
4071
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004072 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004073 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4074 break;
4075 }
4076
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004077 case Primitive::kPrimNot: {
4078 // /* HeapReference<Object> */ out = *(base + offset)
4079 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004080 // Note that a potential implicit null check is handled in this
4081 // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4082 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004083 instruction, out, base, offset, /* needs_null_check */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004084 if (is_volatile) {
4085 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4086 }
4087 } else {
4088 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4089 codegen_->MaybeRecordImplicitNullCheck(instruction);
4090 if (is_volatile) {
4091 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4092 }
4093 // If read barriers are enabled, emit read barriers other than
4094 // Baker's using a slow path (and also unpoison the loaded
4095 // reference, if heap poisoning is enabled).
4096 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4097 }
4098 break;
4099 }
4100
Calin Juravle52c48962014-12-16 17:02:57 +00004101 case Primitive::kPrimLong: {
4102 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4103 break;
4104 }
4105
4106 case Primitive::kPrimFloat: {
4107 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4108 break;
4109 }
4110
4111 case Primitive::kPrimDouble: {
4112 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4113 break;
4114 }
4115
4116 case Primitive::kPrimVoid:
4117 LOG(FATAL) << "Unreachable type " << field_type;
4118 UNREACHABLE();
4119 }
4120
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004121 if (field_type == Primitive::kPrimNot) {
4122 // Potential implicit null checks, in the case of reference
4123 // fields, are handled in the previous switch statement.
4124 } else {
4125 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004126 }
Roland Levillain4d027112015-07-01 15:41:14 +01004127
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004128 if (is_volatile) {
4129 if (field_type == Primitive::kPrimNot) {
4130 // Memory barriers, in the case of references, are also handled
4131 // in the previous switch statement.
4132 } else {
4133 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4134 }
Roland Levillain4d027112015-07-01 15:41:14 +01004135 }
Calin Juravle52c48962014-12-16 17:02:57 +00004136}
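// Illustrative sketch (not part of the compiler's output): the ordering a
// volatile field get needs, expressed with std::atomic. The plain load plus the
// kLoadAny barrier above amounts to acquire semantics; on x86-64 the barrier
// costs no instruction, but it still keeps later memory operations behind the
// load.
#include <atomic>
#include <cstdint>

static int32_t volatile_int_field_get(const std::atomic<int32_t>& field) {
  return field.load(std::memory_order_acquire);  // load + LoadAny
}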
4137
4138void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4139 const FieldInfo& field_info) {
4140 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4141
4142 LocationSummary* locations =
4143 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004144 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004145 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004146 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004147 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004148
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004149 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004150 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004151 if (is_volatile) {
4152 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4153 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4154 } else {
4155 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4156 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004157 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004158 if (is_volatile) {
4159 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4160 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4161 } else {
4162 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4163 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004164 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004165 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004166 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004167 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004168 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004169 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4170 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004171 locations->AddTemp(Location::RequiresRegister());
4172 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004173}
4174
Calin Juravle52c48962014-12-16 17:02:57 +00004175void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004176 const FieldInfo& field_info,
4177 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004178 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4179
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004180 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004181 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4182 Location value = locations->InAt(1);
4183 bool is_volatile = field_info.IsVolatile();
4184 Primitive::Type field_type = field_info.GetFieldType();
4185 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4186
4187 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004188 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004189 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004190
Mark Mendellea5af682015-10-22 17:35:49 -04004191 bool maybe_record_implicit_null_check_done = false;
4192
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004193 switch (field_type) {
4194 case Primitive::kPrimBoolean:
4195 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004196 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004197 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004198 __ movb(Address(base, offset), Immediate(v));
4199 } else {
4200 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4201 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004202 break;
4203 }
4204
4205 case Primitive::kPrimShort:
4206 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004207 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004208 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004209 __ movw(Address(base, offset), Immediate(v));
4210 } else {
4211 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4212 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004213 break;
4214 }
4215
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004216 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004217 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004218 if (value.IsConstant()) {
4219 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004220 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4221 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4222 // Note: if heap poisoning is enabled, no need to poison
4223 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004224 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004225 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004226 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4227 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4228 __ movl(temp, value.AsRegister<CpuRegister>());
4229 __ PoisonHeapReference(temp);
4230 __ movl(Address(base, offset), temp);
4231 } else {
4232 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4233 }
Mark Mendell40741f32015-04-20 22:10:34 -04004234 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004235 break;
4236 }
4237
4238 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004239 if (value.IsConstant()) {
4240 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004241 codegen_->MoveInt64ToAddress(Address(base, offset),
4242 Address(base, offset + sizeof(int32_t)),
4243 v,
4244 instruction);
4245 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004246 } else {
4247 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4248 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004249 break;
4250 }
4251
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004252 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004253 if (value.IsConstant()) {
4254 int32_t v =
4255 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4256 __ movl(Address(base, offset), Immediate(v));
4257 } else {
4258 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4259 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004260 break;
4261 }
4262
4263 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004264 if (value.IsConstant()) {
4265 int64_t v =
4266 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4267 codegen_->MoveInt64ToAddress(Address(base, offset),
4268 Address(base, offset + sizeof(int32_t)),
4269 v,
4270 instruction);
4271 maybe_record_implicit_null_check_done = true;
4272 } else {
4273 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4274 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004275 break;
4276 }
4277
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004278 case Primitive::kPrimVoid:
4279 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004280 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004281 }
Calin Juravle52c48962014-12-16 17:02:57 +00004282
Mark Mendellea5af682015-10-22 17:35:49 -04004283 if (!maybe_record_implicit_null_check_done) {
4284 codegen_->MaybeRecordImplicitNullCheck(instruction);
4285 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004286
4287 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4288 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4289 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004290 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004291 }
4292
Calin Juravle52c48962014-12-16 17:02:57 +00004293 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004294 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004295 }
4296}
4297
4298void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4299 HandleFieldSet(instruction, instruction->GetFieldInfo());
4300}
4301
4302void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004303 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004304}
4305
4306void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004307 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004308}
4309
4310void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004311 HandleFieldGet(instruction, instruction->GetFieldInfo());
4312}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004313
Calin Juravle52c48962014-12-16 17:02:57 +00004314void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4315 HandleFieldGet(instruction);
4316}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004317
Calin Juravle52c48962014-12-16 17:02:57 +00004318void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4319 HandleFieldGet(instruction, instruction->GetFieldInfo());
4320}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004321
Calin Juravle52c48962014-12-16 17:02:57 +00004322void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4323 HandleFieldSet(instruction, instruction->GetFieldInfo());
4324}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004325
Calin Juravle52c48962014-12-16 17:02:57 +00004326void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004327 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004328}
4329
Calin Juravlee460d1d2015-09-29 04:52:17 +01004330void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4331 HUnresolvedInstanceFieldGet* instruction) {
4332 FieldAccessCallingConventionX86_64 calling_convention;
4333 codegen_->CreateUnresolvedFieldLocationSummary(
4334 instruction, instruction->GetFieldType(), calling_convention);
4335}
4336
4337void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4338 HUnresolvedInstanceFieldGet* instruction) {
4339 FieldAccessCallingConventionX86_64 calling_convention;
4340 codegen_->GenerateUnresolvedFieldAccess(instruction,
4341 instruction->GetFieldType(),
4342 instruction->GetFieldIndex(),
4343 instruction->GetDexPc(),
4344 calling_convention);
4345}
4346
4347void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4348 HUnresolvedInstanceFieldSet* instruction) {
4349 FieldAccessCallingConventionX86_64 calling_convention;
4350 codegen_->CreateUnresolvedFieldLocationSummary(
4351 instruction, instruction->GetFieldType(), calling_convention);
4352}
4353
4354void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4355 HUnresolvedInstanceFieldSet* instruction) {
4356 FieldAccessCallingConventionX86_64 calling_convention;
4357 codegen_->GenerateUnresolvedFieldAccess(instruction,
4358 instruction->GetFieldType(),
4359 instruction->GetFieldIndex(),
4360 instruction->GetDexPc(),
4361 calling_convention);
4362}
4363
4364void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4365 HUnresolvedStaticFieldGet* instruction) {
4366 FieldAccessCallingConventionX86_64 calling_convention;
4367 codegen_->CreateUnresolvedFieldLocationSummary(
4368 instruction, instruction->GetFieldType(), calling_convention);
4369}
4370
4371void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4372 HUnresolvedStaticFieldGet* instruction) {
4373 FieldAccessCallingConventionX86_64 calling_convention;
4374 codegen_->GenerateUnresolvedFieldAccess(instruction,
4375 instruction->GetFieldType(),
4376 instruction->GetFieldIndex(),
4377 instruction->GetDexPc(),
4378 calling_convention);
4379}
4380
4381void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4382 HUnresolvedStaticFieldSet* instruction) {
4383 FieldAccessCallingConventionX86_64 calling_convention;
4384 codegen_->CreateUnresolvedFieldLocationSummary(
4385 instruction, instruction->GetFieldType(), calling_convention);
4386}
4387
4388void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4389 HUnresolvedStaticFieldSet* instruction) {
4390 FieldAccessCallingConventionX86_64 calling_convention;
4391 codegen_->GenerateUnresolvedFieldAccess(instruction,
4392 instruction->GetFieldType(),
4393 instruction->GetFieldIndex(),
4394 instruction->GetDexPc(),
4395 calling_convention);
4396}
4397
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004398void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko3b7537b2016-09-13 11:56:01 +00004399 LocationSummary* locations = codegen_->CreateNullCheckLocations(instruction);
4400 if (!codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
4401 // Explicit null checks can use any location.
4402 locations->SetInAt(0, Location::Any());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004403 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004404}
4405
Calin Juravle2ae48182016-03-16 14:05:09 +00004406void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4407 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004408 return;
4409 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004410 LocationSummary* locations = instruction->GetLocations();
4411 Location obj = locations->InAt(0);
4412
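  // The test instruction only reads memory: if `obj` is null, the load from
  // offset 0 faults and the runtime's fault handler turns it into a
  // NullPointerException using the PC recorded below. RAX is an arbitrary
  // choice; testl does not modify it.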
4413 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004414 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004415}
4416
Calin Juravle2ae48182016-03-16 14:05:09 +00004417void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004418 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004419 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004420
4421 LocationSummary* locations = instruction->GetLocations();
4422 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004423
4424 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004425 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004426 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004427 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004428 } else {
4429 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004430 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004431 __ jmp(slow_path->GetEntryLabel());
4432 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004433 }
4434 __ j(kEqual, slow_path->GetEntryLabel());
4435}
4436
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004437void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004438 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004439}
4440
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004441void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004442 bool object_array_get_with_read_barrier =
4443 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004444 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004445 new (GetGraph()->GetArena()) LocationSummary(instruction,
4446 object_array_get_with_read_barrier ?
4447 LocationSummary::kCallOnSlowPath :
4448 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004449 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4450 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
4451 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004452 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004453 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004454 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4455 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4456 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004457 // The output overlaps for an object array get when read barriers
4458 // are enabled: we do not want the move to overwrite the array's
4459 // location, as we need it to emit the read barrier.
4460 locations->SetOut(
4461 Location::RequiresRegister(),
4462 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004463 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004464}
4465
4466void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4467 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004468 Location obj_loc = locations->InAt(0);
4469 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004470 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004471 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004472 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004473
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004474 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004475 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004476 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004477 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004478 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004479 break;
4480 }
4481
4482 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004483 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004484 __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004485 break;
4486 }
4487
4488 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004489 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004490 __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004491 break;
4492 }
4493
4494 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004495 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004496 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004497 break;
4498 }
4499
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004500 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004501 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004502 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004503 break;
4504 }
4505
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004506 case Primitive::kPrimNot: {
4507 static_assert(
4508 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4509 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004510 // /* HeapReference<Object> */ out =
4511 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4512 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004513 // Note that a potential implicit null check is handled in this
4514 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4515 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004516 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004517 } else {
4518 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004519 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
4520 codegen_->MaybeRecordImplicitNullCheck(instruction);
4521 // If read barriers are enabled, emit read barriers other than
4522 // Baker's using a slow path (and also unpoison the loaded
4523 // reference, if heap poisoning is enabled).
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004524 if (index.IsConstant()) {
4525 uint32_t offset =
4526 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004527 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4528 } else {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004529 codegen_->MaybeGenerateReadBarrierSlow(
4530 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4531 }
4532 }
4533 break;
4534 }
4535
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004536 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004537 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004538 __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004539 break;
4540 }
4541
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004542 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004543 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004544 __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004545 break;
4546 }
4547
4548 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004549 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004550 __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004551 break;
4552 }
4553
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004554 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004555 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004556 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004557 }
Roland Levillain4d027112015-07-01 15:41:14 +01004558
4559 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004560 // Potential implicit null checks, in the case of reference
4561 // arrays, are handled in the previous switch statement.
4562 } else {
4563 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004564 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004565}
4566
4567void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004568 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004569
4570 bool needs_write_barrier =
4571 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004572 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004573
Nicolas Geoffray39468442014-09-02 15:17:15 +01004574 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004575 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004576 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004577 LocationSummary::kCallOnSlowPath :
4578 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004579
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004580 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004581 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4582 if (Primitive::IsFloatingPointType(value_type)) {
4583 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004584 } else {
4585 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4586 }
4587
4588 if (needs_write_barrier) {
4589 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004590 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004591 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004592 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004593}
4594
4595void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4596 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004597 Location array_loc = locations->InAt(0);
4598 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004599 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004600 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004601 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004602 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004603 bool needs_write_barrier =
4604 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004605 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4606 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4607 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004608
4609 switch (value_type) {
4610 case Primitive::kPrimBoolean:
4611 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004612 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004613 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004614 if (value.IsRegister()) {
4615 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004616 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004617 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004618 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004619 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004620 break;
4621 }
4622
4623 case Primitive::kPrimShort:
4624 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004625 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004626 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004627 if (value.IsRegister()) {
4628 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004629 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004630 DCHECK(value.IsConstant()) << value;
4631 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004632 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004633 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004634 break;
4635 }
4636
4637 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004638 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004639 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004640
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004641 if (!value.IsRegister()) {
4642 // Just setting null.
4643 DCHECK(instruction->InputAt(2)->IsNullConstant());
4644 DCHECK(value.IsConstant()) << value;
4645 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004646 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004647 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004648 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004649 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004650 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004651
4652 DCHECK(needs_write_barrier);
4653 CpuRegister register_value = value.AsRegister<CpuRegister>();
Roland Levillain16d9f942016-08-25 17:27:56 +01004654 // We cannot use a NearLabel for `done`, as its range may be too
4655 // short when Baker read barriers are enabled.
4656 Label done;
4657 NearLabel not_null, do_put;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004658 SlowPathCode* slow_path = nullptr;
Roland Levillain16d9f942016-08-25 17:27:56 +01004659 Location temp_loc = locations->GetTemp(0);
4660 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004661 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004662 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4663 codegen_->AddSlowPath(slow_path);
4664 if (instruction->GetValueCanBeNull()) {
4665 __ testl(register_value, register_value);
4666 __ j(kNotEqual, &not_null);
4667 __ movl(address, Immediate(0));
4668 codegen_->MaybeRecordImplicitNullCheck(instruction);
4669 __ jmp(&done);
4670 __ Bind(&not_null);
4671 }
4672
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004673 // Note that when Baker read barriers are enabled, the type
4674 // checks are performed without read barriers. This is fine,
4675 // even in the case where a class object is in the from-space
4676 // after the flip, as a comparison involving such a type would
4677 // not produce a false positive; it may of course produce a
4678 // false negative, in which case we would take the ArraySet
4679 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01004680
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004681 // /* HeapReference<Class> */ temp = array->klass_
4682 __ movl(temp, Address(array, class_offset));
4683 codegen_->MaybeRecordImplicitNullCheck(instruction);
4684 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01004685
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004686 // /* HeapReference<Class> */ temp = temp->component_type_
4687 __ movl(temp, Address(temp, component_offset));
4688 // If heap poisoning is enabled, no need to unpoison `temp`
4689 // nor the object reference in `register_value->klass`, as
4690 // we are comparing two poisoned references.
4691 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01004692
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004693 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4694 __ j(kEqual, &do_put);
4695 // If heap poisoning is enabled, the `temp` reference has
4696 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004697 __ MaybeUnpoisonHeapReference(temp);
4698
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004699 // If heap poisoning is enabled, no need to unpoison the
4700 // heap reference loaded below, as it is only used for a
4701 // comparison with null.
4702 __ cmpl(Address(temp, super_offset), Immediate(0));
4703 __ j(kNotEqual, slow_path->GetEntryLabel());
4704 __ Bind(&do_put);
4705 } else {
4706 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004707 }
4708 }
4709
4710 if (kPoisonHeapReferences) {
4711 __ movl(temp, register_value);
4712 __ PoisonHeapReference(temp);
4713 __ movl(address, temp);
4714 } else {
4715 __ movl(address, register_value);
4716 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004717 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004718 codegen_->MaybeRecordImplicitNullCheck(instruction);
4719 }
4720
4721 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4722 codegen_->MarkGCCard(
4723 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4724 __ Bind(&done);
4725
4726 if (slow_path != nullptr) {
4727 __ Bind(slow_path->GetExitLabel());
4728 }
4729
4730 break;
4731 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004732
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004733 case Primitive::kPrimInt: {
4734 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004735 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004736 if (value.IsRegister()) {
4737 __ movl(address, value.AsRegister<CpuRegister>());
4738 } else {
4739 DCHECK(value.IsConstant()) << value;
4740 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4741 __ movl(address, Immediate(v));
4742 }
4743 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004744 break;
4745 }
4746
4747 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004748 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004749 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004750 if (value.IsRegister()) {
4751 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004752 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004753 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004754 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004755 Address address_high =
4756 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04004757 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004758 }
4759 break;
4760 }
4761
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004762 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004763 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004764 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004765 if (value.IsFpuRegister()) {
4766 __ movss(address, value.AsFpuRegister<XmmRegister>());
4767 } else {
4768 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004769 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
Mark Mendellea5af682015-10-22 17:35:49 -04004770 __ movl(address, Immediate(v));
4771 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004772 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004773 break;
4774 }
4775
4776 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004777 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004778 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004779 if (value.IsFpuRegister()) {
4780 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4781 codegen_->MaybeRecordImplicitNullCheck(instruction);
4782 } else {
4783 int64_t v =
4784 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004785 Address address_high =
4786 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04004787 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4788 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004789 break;
4790 }
4791
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004792 case Primitive::kPrimVoid:
4793 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004794 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004795 }
4796}
4797
4798void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004799 LocationSummary* locations =
4800 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004801 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04004802 if (!instruction->IsEmittedAtUseSite()) {
4803 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4804 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004805}
4806
4807void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04004808 if (instruction->IsEmittedAtUseSite()) {
4809 return;
4810 }
4811
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004812 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01004813 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00004814 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
4815 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004816 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004817 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004818}
4819
4820void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004821 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4822 ? LocationSummary::kCallOnSlowPath
4823 : LocationSummary::kNoCall;
4824 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05004825 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04004826 HInstruction* length = instruction->InputAt(1);
4827 if (!length->IsEmittedAtUseSite()) {
4828 locations->SetInAt(1, Location::RegisterOrConstant(length));
4829 }
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004830 if (instruction->HasUses()) {
4831 locations->SetOut(Location::SameAsFirstInput());
4832 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004833}
4834
4835void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
4836 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05004837 Location index_loc = locations->InAt(0);
4838 Location length_loc = locations->InAt(1);
Mark Mendellee8d9712016-07-12 11:13:15 -04004839 SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004840
Mark Mendell99dbd682015-04-22 16:18:52 -04004841 if (length_loc.IsConstant()) {
4842 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
4843 if (index_loc.IsConstant()) {
4844 // BCE will remove the bounds check if we are guaranteed to pass.
4845 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
4846 if (index < 0 || index >= length) {
4847 codegen_->AddSlowPath(slow_path);
4848 __ jmp(slow_path->GetEntryLabel());
4849 } else {
4850 // Some optimization after BCE may have generated this, and we should not
4851 // generate a bounds check if it is a valid range.
4852 }
4853 return;
4854 }
4855
4856 // We have to reverse the jump condition because the length is the constant.
4857 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
4858 __ cmpl(index_reg, Immediate(length));
4859 codegen_->AddSlowPath(slow_path);
4860 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05004861 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04004862 HInstruction* array_length = instruction->InputAt(1);
4863 if (array_length->IsEmittedAtUseSite()) {
4864 // Address the length field in the array.
4865 DCHECK(array_length->IsArrayLength());
4866 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
4867 Location array_loc = array_length->GetLocations()->InAt(0);
4868 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
4869 if (index_loc.IsConstant()) {
4870 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
4871 __ cmpl(array_len, Immediate(value));
4872 } else {
4873 __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
4874 }
4875 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendell99dbd682015-04-22 16:18:52 -04004876 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004877 codegen_->GenerateIntCompare(length_loc, index_loc);
Mark Mendell99dbd682015-04-22 16:18:52 -04004878 }
4879 codegen_->AddSlowPath(slow_path);
4880 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05004881 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004882}
4883
4884void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
4885 CpuRegister card,
4886 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004887 CpuRegister value,
4888 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004889 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004890 if (value_can_be_null) {
4891 __ testl(value, value);
4892 __ j(kEqual, &is_null);
4893 }
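  // Load the card table base, shift the object address right by kCardShift to
  // index its card, and dirty that card with a byte store. The byte stored is
  // the low byte of `card` itself: the card table base is expected to be
  // biased so that its least-significant byte equals the dirty-card value
  // (see art::gc::accounting::CardTable).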
Andreas Gampe542451c2016-07-26 09:02:02 -07004894 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004895 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004896 __ movq(temp, object);
4897 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01004898 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004899 if (value_can_be_null) {
4900 __ Bind(&is_null);
4901 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004902}
4903
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004904void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004905 LOG(FATAL) << "Unimplemented";
4906}
4907
4908void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004909 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4910}
4911
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00004912void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01004913 LocationSummary* locations =
4914 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4915 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00004916}
4917
4918void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01004919 HBasicBlock* block = instruction->GetBlock();
4920 if (block->GetLoopInformation() != nullptr) {
4921 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4922 // The back edge will generate the suspend check.
4923 return;
4924 }
4925 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4926 // The goto will generate the suspend check.
4927 return;
4928 }
4929 GenerateSuspendCheck(instruction, nullptr);
4930}
4931
4932void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
4933 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00004934 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01004935 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
4936 if (slow_path == nullptr) {
4937 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
4938 instruction->SetSlowPath(slow_path);
4939 codegen_->AddSlowPath(slow_path);
4940 if (successor != nullptr) {
4941 DCHECK(successor->IsLoopHeader());
4942 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
4943 }
4944 } else {
4945 DCHECK_EQ(slow_path->GetSuccessor(), successor);
4946 }
4947
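  // Check the 16-bit thread flags: any non-zero flag (e.g. a suspend or
  // checkpoint request) sends us to the slow path.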
Andreas Gampe542451c2016-07-26 09:02:02 -07004948 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004949 /* no_rip */ true),
4950 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01004951 if (successor == nullptr) {
4952 __ j(kNotEqual, slow_path->GetEntryLabel());
4953 __ Bind(slow_path->GetReturnLabel());
4954 } else {
4955 __ j(kEqual, codegen_->GetLabelOf(successor));
4956 __ jmp(slow_path->GetEntryLabel());
4957 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00004958}
4959
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004960X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
4961 return codegen_->GetAssembler();
4962}
4963
4964void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01004965 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004966 Location source = move->GetSource();
4967 Location destination = move->GetDestination();
4968
4969 if (source.IsRegister()) {
4970 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004971 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004972 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004973 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00004974 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004975 } else {
4976 DCHECK(destination.IsDoubleStackSlot());
4977 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00004978 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004979 }
4980 } else if (source.IsStackSlot()) {
4981 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004982 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004983 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004984 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004985 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004986 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004987 } else {
4988 DCHECK(destination.IsStackSlot());
4989 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
4990 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
4991 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004992 } else if (source.IsDoubleStackSlot()) {
4993 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004994 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004995 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004996 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00004997 __ movsd(destination.AsFpuRegister<XmmRegister>(),
4998 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004999 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005000 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005001 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5002 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5003 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005004 } else if (source.IsConstant()) {
5005 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005006 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5007 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005008 if (destination.IsRegister()) {
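        // Zeroing with xorl is preferred over a movl of 0: the encoding is
        // shorter and the CPU recognizes it as a dependency-breaking idiom.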
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005009 if (value == 0) {
5010 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5011 } else {
5012 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5013 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005014 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005015 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005016 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005017 }
5018 } else if (constant->IsLongConstant()) {
5019 int64_t value = constant->AsLongConstant()->GetValue();
5020 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005021 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005022 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005023 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005024 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005025 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005026 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005027 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005028 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005029 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005030 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005031 } else {
5032 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005033 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005034 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5035 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005036 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005037 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005038 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005039 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005040 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005041 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005042 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005043 } else {
5044 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005045 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005046 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005047 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005048 } else if (source.IsFpuRegister()) {
5049 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005050 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005051 } else if (destination.IsStackSlot()) {
5052 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005053 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005054 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005055 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005056 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005057 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005058 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005059 }
5060}
5061
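// The Exchange helpers below swap values through the reserved core scratch
// register TMP; a memory-operand xchg would carry an implicit lock prefix.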
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005062void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005063 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005064 __ movl(Address(CpuRegister(RSP), mem), reg);
5065 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005066}
5067
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005068void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005069 ScratchRegisterScope ensure_scratch(
5070 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5071
5072 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5073 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5074 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5075 Address(CpuRegister(RSP), mem2 + stack_offset));
5076 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5077 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5078 CpuRegister(ensure_scratch.GetRegister()));
5079}
5080
Mark Mendell8a1c7282015-06-29 15:41:28 -04005081void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5082 __ movq(CpuRegister(TMP), reg1);
5083 __ movq(reg1, reg2);
5084 __ movq(reg2, CpuRegister(TMP));
5085}
5086
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005087void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5088 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5089 __ movq(Address(CpuRegister(RSP), mem), reg);
5090 __ movq(reg, CpuRegister(TMP));
5091}
5092
5093void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5094 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005095 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005096
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005097 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5098 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5099 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5100 Address(CpuRegister(RSP), mem2 + stack_offset));
5101 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5102 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5103 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005104}
5105
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005106void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5107 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5108 __ movss(Address(CpuRegister(RSP), mem), reg);
5109 __ movd(reg, CpuRegister(TMP));
5110}
5111
5112void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5113 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5114 __ movsd(Address(CpuRegister(RSP), mem), reg);
5115 __ movd(reg, CpuRegister(TMP));
5116}
5117
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005118void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005119 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005120 Location source = move->GetSource();
5121 Location destination = move->GetDestination();
5122
5123 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005124 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005125 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005126 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005127 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005128 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005129 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005130 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5131 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005132 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005133 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005134 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005135 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5136 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005137 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005138 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5139 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5140 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005141 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005142 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005143 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005144 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005145 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005146 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005147 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005148 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005149 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005150 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005151 }
5152}
5153
5154
5155void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5156 __ pushq(CpuRegister(reg));
5157}
5158
5159
5160void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5161 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005162}
5163
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005164void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005165 SlowPathCode* slow_path, CpuRegister class_reg) {
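  // A class whose status is at least kStatusInitialized is fully initialized;
  // any lower status takes the slow path, which calls into the runtime to
  // perform (or wait for) the initialization.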
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005166 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5167 Immediate(mirror::Class::kStatusInitialized));
5168 __ j(kLess, slow_path->GetEntryLabel());
5169 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005170 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005171}
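// Rough sketch of what the check above emits (illustrative only; the slow
// path label and encodings are managed by the assembler):
//
//   cmpl [class_reg + mirror::Class::StatusOffset()], kStatusInitialized
//   jl   <class initialization slow path>
//   <slow path exit>
//
// Thanks to the strongly ordered (TSO) x86-64 memory model, no explicit
// acquire fence is needed after the status check, as noted above.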
5172
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005173HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5174 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005175 switch (desired_class_load_kind) {
5176 case HLoadClass::LoadKind::kReferrersClass:
5177 break;
5178 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5179 DCHECK(!GetCompilerOptions().GetCompilePic());
5180 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5181 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5182 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5183 DCHECK(GetCompilerOptions().GetCompilePic());
5184 break;
5185 case HLoadClass::LoadKind::kBootImageAddress:
5186 break;
5187 case HLoadClass::LoadKind::kDexCacheAddress:
5188 DCHECK(Runtime::Current()->UseJitCompilation());
5189 break;
5190 case HLoadClass::LoadKind::kDexCachePcRelative:
5191 DCHECK(!Runtime::Current()->UseJitCompilation());
5192 break;
5193 case HLoadClass::LoadKind::kDexCacheViaMethod:
5194 break;
5195 }
5196 return desired_class_load_kind;
5197}
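// Illustrative sketch (orientation only; see VisitLoadClass below for the
// authoritative sequences): the boot-image kinds kept above differ mainly in
// the addressing mode of the emitted load, roughly
//
//   kBootImageLinkTimePcRelative:  leal out, [rip + <type patch>]
//   kBootImageAddress:             movl out, <32-bit boot image address>
//
// which is why the always-available RIP-relative form replaces the link-time
// absolute-address kind on x86-64, for PIC and non-PIC compiles alike.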
5198
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005199void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005200 if (cls->NeedsAccessCheck()) {
5201 InvokeRuntimeCallingConvention calling_convention;
5202 CodeGenerator::CreateLoadClassLocationSummary(
5203 cls,
5204 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5205 Location::RegisterLocation(RAX),
5206 /* code_generator_supports_read_barrier */ true);
5207 return;
5208 }
5209
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005210 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
5211 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005212 ? LocationSummary::kCallOnSlowPath
5213 : LocationSummary::kNoCall;
5214 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005215 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005216 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
5217 }
5218
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005219 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5220 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5221 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5222 locations->SetInAt(0, Location::RequiresRegister());
5223 }
5224 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005225}
5226
5227void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005228 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005229 if (cls->NeedsAccessCheck()) {
5230 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
Serban Constantinescuba45db02016-07-12 22:53:02 +01005231 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005232 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005233 return;
5234 }
5235
Roland Levillain0d5a2812015-11-13 10:07:31 +00005236 Location out_loc = locations->Out();
5237 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005238
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005239 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005240 bool generate_null_check = false;
5241 switch (cls->GetLoadKind()) {
5242 case HLoadClass::LoadKind::kReferrersClass: {
5243 DCHECK(!cls->CanCallRuntime());
5244 DCHECK(!cls->MustGenerateClinitCheck());
5245 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5246 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5247 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005248 cls,
5249 out_loc,
5250 Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
5251 /*fixup_label*/nullptr,
5252 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005253 break;
5254 }
5255 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005256 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005257 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5258 codegen_->RecordTypePatch(cls);
5259 break;
5260 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005261 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005262 DCHECK_NE(cls->GetAddress(), 0u);
5263 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
5264 __ movl(out, Immediate(address)); // Zero-extended.
5265 codegen_->RecordSimplePatch();
5266 break;
5267 }
5268 case HLoadClass::LoadKind::kDexCacheAddress: {
5269 DCHECK_NE(cls->GetAddress(), 0u);
5270 // /* GcRoot<mirror::Class> */ out = *address
5271 if (IsUint<32>(cls->GetAddress())) {
5272 Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005273 GenerateGcRootFieldLoad(cls,
5274 out_loc,
5275 address,
5276 /*fixup_label*/nullptr,
5277 requires_read_barrier);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005278 } else {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005279 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5280 __ movq(out, Immediate(cls->GetAddress()));
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005281 GenerateGcRootFieldLoad(cls,
5282 out_loc,
5283 Address(out, 0),
5284 /*fixup_label*/nullptr,
5285 requires_read_barrier);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005286 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005287 generate_null_check = !cls->IsInDexCache();
5288 break;
5289 }
5290 case HLoadClass::LoadKind::kDexCachePcRelative: {
5291 uint32_t offset = cls->GetDexCacheElementOffset();
5292 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
5293 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5294 /* no_rip */ false);
5295 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005296 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005297 generate_null_check = !cls->IsInDexCache();
5298 break;
5299 }
5300 case HLoadClass::LoadKind::kDexCacheViaMethod: {
5301 // /* GcRoot<mirror::Class>[] */ out =
5302 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5303 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5304 __ movq(out,
5305 Address(current_method,
5306 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
5307 // /* GcRoot<mirror::Class> */ out = out[type_index]
5308 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005309 cls,
5310 out_loc,
5311 Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())),
5312 /*fixup_label*/nullptr,
5313 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005314 generate_null_check = !cls->IsInDexCache();
5315 break;
5316 }
5317 default:
5318 LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
5319 UNREACHABLE();
5320 }
5321
5322 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5323 DCHECK(cls->CanCallRuntime());
5324 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5325 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5326 codegen_->AddSlowPath(slow_path);
5327 if (generate_null_check) {
5328 __ testl(out, out);
5329 __ j(kEqual, slow_path->GetEntryLabel());
5330 }
5331 if (cls->MustGenerateClinitCheck()) {
5332 GenerateClassInitializationCheck(slow_path, out);
5333 } else {
5334 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005335 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005336 }
5337}
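// Summary sketch of the per-kind patterns emitted above (for orientation
// only, the switch is authoritative; "GC root load" stands for
// GenerateGcRootFieldLoad and may expand to a Baker read barrier sequence):
//
//   kReferrersClass:              GC root load from [current_method + declaring_class_]
//   kBootImageLinkTimePcRelative: leal out, [rip + <type patch>]
//   kBootImageAddress:            movl out, <32-bit address>
//   kDexCacheAddress:             GC root load from a fixed dex cache slot
//   kDexCachePcRelative:          GC root load from a PC-relative dex cache slot
//   kDexCacheViaMethod:           movq out, [current_method + resolved_types_], then
//                                 GC root load from out[type_index]
//
// followed, when required, by the null check and/or the clinit check slow path.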
5338
5339void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5340 LocationSummary* locations =
5341 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5342 locations->SetInAt(0, Location::RequiresRegister());
5343 if (check->HasUses()) {
5344 locations->SetOut(Location::SameAsFirstInput());
5345 }
5346}
5347
5348void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005349 // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005350 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005351 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005352 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005353 GenerateClassInitializationCheck(slow_path,
5354 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005355}
5356
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005357HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5358 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005359 switch (desired_string_load_kind) {
5360 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5361 DCHECK(!GetCompilerOptions().GetCompilePic());
5362 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5363 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5364 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5365 DCHECK(GetCompilerOptions().GetCompilePic());
5366 break;
5367 case HLoadString::LoadKind::kBootImageAddress:
5368 break;
5369 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005370 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005371 break;
5372 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005373 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005374 break;
5375 case HLoadString::LoadKind::kDexCacheViaMethod:
5376 break;
5377 }
5378 return desired_string_load_kind;
5379}
5380
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005381void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005382 LocationSummary::CallKind call_kind = load->NeedsEnvironment()
5383 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005384 : LocationSummary::kNoCall;
5385 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005386 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5387 locations->SetInAt(0, Location::RequiresRegister());
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005388 locations->SetOut(Location::RegisterLocation(RAX));
5389 } else {
5390 locations->SetOut(Location::RequiresRegister());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005391 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005392}
5393
5394void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005395 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005396 Location out_loc = locations->Out();
5397 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005398
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005399 switch (load->GetLoadKind()) {
5400 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005401 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5402 codegen_->RecordStringPatch(load);
5403 return; // No dex cache slow path.
5404 }
5405 case HLoadString::LoadKind::kBootImageAddress: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005406 DCHECK_NE(load->GetAddress(), 0u);
5407 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5408 __ movl(out, Immediate(address)); // Zero-extended.
5409 codegen_->RecordSimplePatch();
5410 return; // No dex cache slow path.
5411 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005412 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005413 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005414 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005415
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005416 // TODO: Re-add the compiler code to do string dex cache lookup again.
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005417 InvokeRuntimeCallingConvention calling_convention;
5418 __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
5419 Immediate(load->GetStringIndex()));
5420 codegen_->InvokeRuntime(kQuickResolveString,
5421 load,
5422 load->GetDexPc());
5423 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005424}
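// Illustrative sketch of the fallback path above (not a literal disassembly):
// when no boot-image kind applies, the string index is materialized in the
// first runtime-call argument register and resolution is delegated to the
// runtime, with the result returned in RAX as set up in the locations:
//
//   movl arg0, <string index>
//   call <kQuickResolveString entry point>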
5425
David Brazdilcb1c0552015-08-04 16:22:25 +01005426static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005427 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005428 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005429}
5430
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005431void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5432 LocationSummary* locations =
5433 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5434 locations->SetOut(Location::RequiresRegister());
5435}
5436
5437void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005438 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5439}
5440
5441void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5442 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5443}
5444
5445void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5446 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005447}
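// Sketch of the TLS access pattern used above (illustrative only): the
// current Thread is addressed through the GS segment, so loading and clearing
// the pending exception reduce to, roughly,
//
//   movl out, gs:[Thread::ExceptionOffset()]    // HLoadException
//   movl gs:[Thread::ExceptionOffset()], 0      // HClearException
//
// with no runtime call on either path.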
5448
5449void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5450 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005451 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005452 InvokeRuntimeCallingConvention calling_convention;
5453 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5454}
5455
5456void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01005457 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005458 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005459}
5460
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005461static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5462 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005463 !kUseBakerReadBarrier &&
5464 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005465 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5466 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5467}
5468
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005469void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005470 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005471 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01005472 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005473 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005474 case TypeCheckKind::kExactCheck:
5475 case TypeCheckKind::kAbstractClassCheck:
5476 case TypeCheckKind::kClassHierarchyCheck:
5477 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005478 call_kind =
5479 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01005480 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005481 break;
5482 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005483 case TypeCheckKind::kUnresolvedCheck:
5484 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005485 call_kind = LocationSummary::kCallOnSlowPath;
5486 break;
5487 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005488
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005489 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01005490 if (baker_read_barrier_slow_path) {
5491 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
5492 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005493 locations->SetInAt(0, Location::RequiresRegister());
5494 locations->SetInAt(1, Location::Any());
5495 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5496 locations->SetOut(Location::RequiresRegister());
5497 // When read barriers are enabled, we need a temporary register for
5498 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005499 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005500 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005501 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005502}
5503
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005504void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005505 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005506 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005507 Location obj_loc = locations->InAt(0);
5508 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005509 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005510 Location out_loc = locations->Out();
5511 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005512 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005513 locations->GetTemp(0) :
5514 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005515 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005516 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5517 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5518 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005519 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005520 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005521
5522 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005523 // Avoid null check if we know obj is not null.
5524 if (instruction->MustDoNullCheck()) {
5525 __ testl(obj, obj);
5526 __ j(kEqual, &zero);
5527 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005528
Roland Levillain0d5a2812015-11-13 10:07:31 +00005529 // /* HeapReference<Class> */ out = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005530 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005531
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005532 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005533 case TypeCheckKind::kExactCheck: {
5534 if (cls.IsRegister()) {
5535 __ cmpl(out, cls.AsRegister<CpuRegister>());
5536 } else {
5537 DCHECK(cls.IsStackSlot()) << cls;
5538 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5539 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005540 if (zero.IsLinked()) {
5541 // Classes must be equal for the instanceof to succeed.
5542 __ j(kNotEqual, &zero);
5543 __ movl(out, Immediate(1));
5544 __ jmp(&done);
5545 } else {
5546 __ setcc(kEqual, out);
5547 // setcc only sets the low byte.
5548 __ andl(out, Immediate(1));
5549 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005550 break;
5551 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005552
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005553 case TypeCheckKind::kAbstractClassCheck: {
5554 // If the class is abstract, we eagerly fetch the super class of the
5555 // object to avoid doing a comparison we know will fail.
5556 NearLabel loop, success;
5557 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005558 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005559 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005560 __ testl(out, out);
5561 // If `out` is null, we use it for the result, and jump to `done`.
5562 __ j(kEqual, &done);
5563 if (cls.IsRegister()) {
5564 __ cmpl(out, cls.AsRegister<CpuRegister>());
5565 } else {
5566 DCHECK(cls.IsStackSlot()) << cls;
5567 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5568 }
5569 __ j(kNotEqual, &loop);
5570 __ movl(out, Immediate(1));
5571 if (zero.IsLinked()) {
5572 __ jmp(&done);
5573 }
5574 break;
5575 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005576
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005577 case TypeCheckKind::kClassHierarchyCheck: {
5578 // Walk over the class hierarchy to find a match.
5579 NearLabel loop, success;
5580 __ Bind(&loop);
5581 if (cls.IsRegister()) {
5582 __ cmpl(out, cls.AsRegister<CpuRegister>());
5583 } else {
5584 DCHECK(cls.IsStackSlot()) << cls;
5585 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5586 }
5587 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005588 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005589 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005590 __ testl(out, out);
5591 __ j(kNotEqual, &loop);
5592 // If `out` is null, we use it for the result, and jump to `done`.
5593 __ jmp(&done);
5594 __ Bind(&success);
5595 __ movl(out, Immediate(1));
5596 if (zero.IsLinked()) {
5597 __ jmp(&done);
5598 }
5599 break;
5600 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005601
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005602 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005603 // Do an exact check.
5604 NearLabel exact_check;
5605 if (cls.IsRegister()) {
5606 __ cmpl(out, cls.AsRegister<CpuRegister>());
5607 } else {
5608 DCHECK(cls.IsStackSlot()) << cls;
5609 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5610 }
5611 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005612 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005613 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005614 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005615 __ testl(out, out);
5616 // If `out` is null, we use it for the result, and jump to `done`.
5617 __ j(kEqual, &done);
5618 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5619 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005620 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005621 __ movl(out, Immediate(1));
5622 __ jmp(&done);
5623 break;
5624 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005625
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005626 case TypeCheckKind::kArrayCheck: {
5627 if (cls.IsRegister()) {
5628 __ cmpl(out, cls.AsRegister<CpuRegister>());
5629 } else {
5630 DCHECK(cls.IsStackSlot()) << cls;
5631 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5632 }
5633 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005634 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5635 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005636 codegen_->AddSlowPath(slow_path);
5637 __ j(kNotEqual, slow_path->GetEntryLabel());
5638 __ movl(out, Immediate(1));
5639 if (zero.IsLinked()) {
5640 __ jmp(&done);
5641 }
5642 break;
5643 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005644
Calin Juravle98893e12015-10-02 21:05:03 +01005645 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005646 case TypeCheckKind::kInterfaceCheck: {
5647 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005648 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005649 // cases.
5650 //
5651 // We cannot directly call the InstanceofNonTrivial runtime
5652 // entry point without resorting to a type checking slow path
5653 // here (i.e. by calling InvokeRuntime directly), as it would
5654 // require assigning fixed registers for the inputs of this
5655 // HInstanceOf instruction (following the runtime calling
5656 // convention), which might be cluttered by the potential first
5657 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005658 //
5659 // TODO: Introduce a new runtime entry point taking the object
5660 // to test (instead of its class) as argument, and let it deal
5661 // with the read barrier issues. This will let us refactor this
5662 // case of the `switch` code as it was previously (with a direct
5663 // call to the runtime not using a type checking slow path).
5664 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005665 DCHECK(locations->OnlyCallsOnSlowPath());
5666 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5667 /* is_fatal */ false);
5668 codegen_->AddSlowPath(slow_path);
5669 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005670 if (zero.IsLinked()) {
5671 __ jmp(&done);
5672 }
5673 break;
5674 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005675 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005676
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005677 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005678 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005679 __ xorl(out, out);
5680 }
5681
5682 if (done.IsLinked()) {
5683 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005684 }
5685
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005686 if (slow_path != nullptr) {
5687 __ Bind(slow_path->GetExitLabel());
5688 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005689}
5690
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005691void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005692 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5693 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005694 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5695 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005696 case TypeCheckKind::kExactCheck:
5697 case TypeCheckKind::kAbstractClassCheck:
5698 case TypeCheckKind::kClassHierarchyCheck:
5699 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005700 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5701 LocationSummary::kCallOnSlowPath :
5702 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005703 break;
5704 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005705 case TypeCheckKind::kUnresolvedCheck:
5706 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005707 call_kind = LocationSummary::kCallOnSlowPath;
5708 break;
5709 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005710 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5711 locations->SetInAt(0, Location::RequiresRegister());
5712 locations->SetInAt(1, Location::Any());
5713 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5714 locations->AddTemp(Location::RequiresRegister());
5715 // When read barriers are enabled, we need an additional temporary
5716 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005717 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005718 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005719 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005720}
5721
5722void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005723 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005724 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005725 Location obj_loc = locations->InAt(0);
5726 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005727 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005728 Location temp_loc = locations->GetTemp(0);
5729 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005730 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005731 locations->GetTemp(1) :
5732 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005733 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5734 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5735 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5736 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005737
Roland Levillain0d5a2812015-11-13 10:07:31 +00005738 bool is_type_check_slow_path_fatal =
5739 (type_check_kind == TypeCheckKind::kExactCheck ||
5740 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5741 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5742 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5743 !instruction->CanThrowIntoCatchBlock();
5744 SlowPathCode* type_check_slow_path =
5745 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5746 is_type_check_slow_path_fatal);
5747 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005748
Roland Levillain0d5a2812015-11-13 10:07:31 +00005749 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005750 case TypeCheckKind::kExactCheck:
5751 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005752 NearLabel done;
5753 // Avoid null check if we know obj is not null.
5754 if (instruction->MustDoNullCheck()) {
5755 __ testl(obj, obj);
5756 __ j(kEqual, &done);
5757 }
5758
5759 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005760 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005761
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005762 if (cls.IsRegister()) {
5763 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5764 } else {
5765 DCHECK(cls.IsStackSlot()) << cls;
5766 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5767 }
5768 // Jump to slow path for throwing the exception or doing a
5769 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005770 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005771 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005772 break;
5773 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005774
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005775 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005776 NearLabel done;
5777 // Avoid null check if we know obj is not null.
5778 if (instruction->MustDoNullCheck()) {
5779 __ testl(obj, obj);
5780 __ j(kEqual, &done);
5781 }
5782
5783 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005784 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005785
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005786 // If the class is abstract, we eagerly fetch the super class of the
5787 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005788 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005789 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005790 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005791 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005792
5793 // If the class reference currently in `temp` is not null, jump
5794 // to the `compare_classes` label to compare it with the checked
5795 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005796 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005797 __ j(kNotEqual, &compare_classes);
5798 // Otherwise, jump to the slow path to throw the exception.
5799 //
5800 // But before, move back the object's class into `temp` before
5801 // going into the slow path, as it has been overwritten in the
5802 // meantime.
5803 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005804 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005805 __ jmp(type_check_slow_path->GetEntryLabel());
5806
5807 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005808 if (cls.IsRegister()) {
5809 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5810 } else {
5811 DCHECK(cls.IsStackSlot()) << cls;
5812 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5813 }
5814 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00005815 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005816 break;
5817 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005818
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005819 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005820 NearLabel done;
5821 // Avoid null check if we know obj is not null.
5822 if (instruction->MustDoNullCheck()) {
5823 __ testl(obj, obj);
5824 __ j(kEqual, &done);
5825 }
5826
5827 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005828 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005829
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005830 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005831 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005832 __ Bind(&loop);
5833 if (cls.IsRegister()) {
5834 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5835 } else {
5836 DCHECK(cls.IsStackSlot()) << cls;
5837 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5838 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005839 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005840
Roland Levillain0d5a2812015-11-13 10:07:31 +00005841 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005842 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005843
5844 // If the class reference currently in `temp` is not null, jump
5845 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005846 __ testl(temp, temp);
5847 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005848 // Otherwise, jump to the slow path to throw the exception.
5849 //
5850 // But before, move back the object's class into `temp` before
5851 // going into the slow path, as it has been overwritten in the
5852 // meantime.
5853 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005854 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005855 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005856 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005857 break;
5858 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005859
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005860 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005861 // We cannot use a NearLabel here, as its range might be too
5862 // short in some cases when read barriers are enabled. This has
5863 // been observed for instance when the code emitted for this
5864 // case uses high x86-64 registers (R8-R15).
5865 Label done;
5866 // Avoid null check if we know obj is not null.
5867 if (instruction->MustDoNullCheck()) {
5868 __ testl(obj, obj);
5869 __ j(kEqual, &done);
5870 }
5871
5872 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005873 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005874
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005875 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005876 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005877 if (cls.IsRegister()) {
5878 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5879 } else {
5880 DCHECK(cls.IsStackSlot()) << cls;
5881 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5882 }
5883 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005884
5885 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005886 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005887 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005888
5889 // If the component type is not null (i.e. the object is indeed
5890 // an array), jump to label `check_non_primitive_component_type`
5891 // to further check that this component type is not a primitive
5892 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005893 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005894 __ j(kNotEqual, &check_non_primitive_component_type);
5895 // Otherwise, jump to the slow path to throw the exception.
5896 //
5897 // But before, move back the object's class into `temp` before
5898 // going into the slow path, as it has been overwritten in the
5899 // meantime.
5900 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005901 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005902 __ jmp(type_check_slow_path->GetEntryLabel());
5903
5904 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005905 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005906 __ j(kEqual, &done);
5907 // Same comment as above regarding `temp` and the slow path.
5908 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005909 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005910 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005911 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005912 break;
5913 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005914
Calin Juravle98893e12015-10-02 21:05:03 +01005915 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005916 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00005917 NearLabel done;
5918 // Avoid null check if we know obj is not null.
5919 if (instruction->MustDoNullCheck()) {
5920 __ testl(obj, obj);
5921 __ j(kEqual, &done);
5922 }
5923
5924 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005925 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005926
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005927 // We always go into the type check slow path for the unresolved
5928 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005929 //
5930 // We cannot directly call the CheckCast runtime entry point
5931 // without resorting to a type checking slow path here (i.e. by
5932 // calling InvokeRuntime directly), as it would require to
5933 // assign fixed registers for the inputs of this HInstanceOf
5934 // instruction (following the runtime calling convention), which
5935 // might be cluttered by the potential first read barrier
5936 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005937 //
5938 // TODO: Introduce a new runtime entry point taking the object
5939 // to test (instead of its class) as argument, and let it deal
5940 // with the read barrier issues. This will let us refactor this
5941 // case of the `switch` code as it was previously (with a direct
5942 // call to the runtime not using a type checking slow path).
5943 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005944 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005945 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005946 break;
5947 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005948
Roland Levillain0d5a2812015-11-13 10:07:31 +00005949 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005950}
5951
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005952void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
5953 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005954 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005955 InvokeRuntimeCallingConvention calling_convention;
5956 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5957}
5958
5959void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01005960 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01005961 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01005962 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005963 if (instruction->IsEnter()) {
5964 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5965 } else {
5966 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5967 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005968}
5969
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005970void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
5971void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
5972void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
5973
5974void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
5975 LocationSummary* locations =
5976 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5977 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
5978 || instruction->GetResultType() == Primitive::kPrimLong);
5979 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04005980 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005981 locations->SetOut(Location::SameAsFirstInput());
5982}
5983
5984void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
5985 HandleBitwiseOperation(instruction);
5986}
5987
5988void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
5989 HandleBitwiseOperation(instruction);
5990}
5991
5992void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
5993 HandleBitwiseOperation(instruction);
5994}
5995
5996void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
5997 LocationSummary* locations = instruction->GetLocations();
5998 Location first = locations->InAt(0);
5999 Location second = locations->InAt(1);
6000 DCHECK(first.Equals(locations->Out()));
6001
6002 if (instruction->GetResultType() == Primitive::kPrimInt) {
6003 if (second.IsRegister()) {
6004 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006005 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006006 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006007 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006008 } else {
6009 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006010 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006011 }
6012 } else if (second.IsConstant()) {
6013 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6014 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006015 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006016 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006017 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006018 } else {
6019 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006020 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006021 }
6022 } else {
6023 Address address(CpuRegister(RSP), second.GetStackIndex());
6024 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006025 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006026 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006027 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006028 } else {
6029 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006030 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006031 }
6032 }
6033 } else {
6034 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006035 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6036 bool second_is_constant = false;
6037 int64_t value = 0;
6038 if (second.IsConstant()) {
6039 second_is_constant = true;
6040 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006041 }
Mark Mendell40741f32015-04-20 22:10:34 -04006042 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006043
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006044 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006045 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006046 if (is_int32_value) {
6047 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6048 } else {
6049 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6050 }
6051 } else if (second.IsDoubleStackSlot()) {
6052 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006053 } else {
6054 __ andq(first_reg, second.AsRegister<CpuRegister>());
6055 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006056 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006057 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006058 if (is_int32_value) {
6059 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6060 } else {
6061 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6062 }
6063 } else if (second.IsDoubleStackSlot()) {
6064 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006065 } else {
6066 __ orq(first_reg, second.AsRegister<CpuRegister>());
6067 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006068 } else {
6069 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006070 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006071 if (is_int32_value) {
6072 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6073 } else {
6074 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6075 }
6076 } else if (second.IsDoubleStackSlot()) {
6077 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006078 } else {
6079 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6080 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006081 }
6082 }
6083}
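// Illustrative sketch of the 64-bit constant handling above (Intel-style
// operands; the literal placement is handled by the assembler): x86-64
// logical instructions only take sign-extended 32-bit immediates, so
//
//   value fits in int32:  andq first_reg, <imm32>
//   otherwise:            andq first_reg, <RIP-relative literal from LiteralInt64Address>
//
// and likewise for orq/xorq; a stack operand uses an RSP-relative address.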
6084
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006085void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6086 Location out,
6087 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006088 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006089 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6090 if (kEmitCompilerReadBarrier) {
6091 if (kUseBakerReadBarrier) {
6092 // Load with fast path based Baker's read barrier.
6093 // /* HeapReference<Object> */ out = *(out + offset)
6094 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006095 instruction, out, out_reg, offset, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006096 } else {
6097 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006098 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006099 // in the following move operation, as we will need it for the
6100 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00006101 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006102 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006103 // /* HeapReference<Object> */ out = *(out + offset)
6104 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006105 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006106 }
6107 } else {
6108 // Plain load with no read barrier.
6109 // /* HeapReference<Object> */ out = *(out + offset)
6110 __ movl(out_reg, Address(out_reg, offset));
6111 __ MaybeUnpoisonHeapReference(out_reg);
6112 }
6113}
6114
6115void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6116 Location out,
6117 Location obj,
Vladimir Marko953437b2016-08-24 08:30:46 +00006118 uint32_t offset) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006119 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6120 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6121 if (kEmitCompilerReadBarrier) {
6122 if (kUseBakerReadBarrier) {
6123 // Load with fast path based Baker's read barrier.
6124 // /* HeapReference<Object> */ out = *(obj + offset)
6125 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006126 instruction, out, obj_reg, offset, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006127 } else {
6128 // Load with slow path based read barrier.
6129 // /* HeapReference<Object> */ out = *(obj + offset)
6130 __ movl(out_reg, Address(obj_reg, offset));
6131 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6132 }
6133 } else {
6134 // Plain load with no read barrier.
6135 // /* HeapReference<Object> */ out = *(obj + offset)
6136 __ movl(out_reg, Address(obj_reg, offset));
6137 __ MaybeUnpoisonHeapReference(out_reg);
6138 }
6139}
6140
6141void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6142 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006143 const Address& address,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006144 Label* fixup_label,
6145 bool requires_read_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006146 CpuRegister root_reg = root.AsRegister<CpuRegister>();
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006147 if (requires_read_barrier) {
6148 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006149 if (kUseBakerReadBarrier) {
6150 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6151 // Baker's read barriers are used:
6152 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006153 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006154 // if (Thread::Current()->GetIsGcMarking()) {
6155 // root = ReadBarrier::Mark(root)
6156 // }
6157
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006158 // /* GcRoot<mirror::Object> */ root = *address
6159 __ movl(root_reg, address);
6160 if (fixup_label != nullptr) {
6161 __ Bind(fixup_label);
6162 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006163 static_assert(
6164 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6165 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6166 "have different sizes.");
6167 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6168 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6169 "have different sizes.");
6170
Vladimir Marko953437b2016-08-24 08:30:46 +00006171 // Slow path marking the GC root `root`.
6172 SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
6173 instruction, root, /* unpoison */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006174 codegen_->AddSlowPath(slow_path);
6175
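      // Test the thread-local is_gc_marking flag (addressed through the GS segment
      // register on x86-64) and take the mark slow path while the GC is marking.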
      __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
                                      /* no_rip */ true),
                    Immediate(0));
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                CpuRegister obj,
                                                                uint32_t offset,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Address src(obj, offset);
  GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
}

void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                CpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
  GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
}

void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
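  // For example, if LockWord::kReadBarrierStateShift were 28, the gray bit would live in
  // bit 4 of byte 3 of the lock word, and the testb below would touch only that byte.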

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
      instruction, ref, /* unpoison */ true);
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                       Location out,
                                                       Location ref,
                                                       Location obj,
                                                       uint32_t offset,
                                                       Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
  }
}

void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
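    // Each loop iteration below handles two table entries. For example, with
    // lower_bound == 0 and four entries, the emitted sequence is roughly:
    //   compare against 1: jb -> successors[0], je -> successors[1];
    //   compare against 3: jb -> successors[2], je -> successors[3];
    //   then fall through to the default jump below.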
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

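  // The jump table, emitted later by JumpTableRIPFixup::CreateJumpTable(), holds one
  // int32 per case: the offset from the start of the table to that case's target block.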
  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}

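// Materialize a 32-bit constant in a general-purpose register, preferring the shorter
// xorl encoding when the value is zero.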
void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
  if (value == 0) {
    __ xorl(dest, dest);
  } else {
    __ movl(dest, Immediate(value));
  }
}

void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
  if (value == 0) {
    // Clears upper bits too.
    __ xorl(dest, dest);
  } else if (IsUint<32>(value)) {
    // We can use a 32 bit move, as it will zero-extend and is shorter.
    __ movl(dest, Immediate(static_cast<int32_t>(value)));
  } else {
    __ movq(dest, Immediate(value));
  }
}

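// Materialize floating-point constants: zero is produced with xorps/xorpd, anything
// else is loaded from a RIP-relative literal in the constant area.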
void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
  if (value == 0) {
    __ xorps(dest, dest);
  } else {
    __ movss(dest, LiteralInt32Address(value));
  }
}

void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
  if (value == 0) {
    __ xorpd(dest, dest);
  } else {
    __ movsd(dest, LiteralInt64Address(value));
  }
}

void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
  Load32BitValue(dest, bit_cast<int32_t, float>(value));
}

void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
  Load64BitValue(dest, bit_cast<int64_t, double>(value));
}

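// Compare a register against an immediate, using the shorter test-against-self
// encoding when the immediate is zero.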
void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
  if (value == 0) {
    __ testl(dest, dest);
  } else {
    __ cmpl(dest, Immediate(value));
  }
}

void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
  if (IsInt<32>(value)) {
    if (value == 0) {
      __ testq(dest, dest);
    } else {
      __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
    }
  } else {
    // Value won't fit in a 32-bit immediate; compare against a 64-bit literal in memory.
    __ cmpq(dest, LiteralInt64Address(value));
  }
}

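// Emit a compare of `lhs` (a register) against `rhs`, which may be a register, a stack
// slot or a constant; constants go through Compare32BitValue()/Compare64BitValue() above.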
void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
  CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
  if (rhs.IsConstant()) {
    int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
    Compare32BitValue(lhs_reg, value);
  } else if (rhs.IsStackSlot()) {
    __ cmpl(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
  } else {
    __ cmpl(lhs_reg, rhs.AsRegister<CpuRegister>());
  }
}

void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
  CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
  if (rhs.IsConstant()) {
    int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
    Compare64BitValue(lhs_reg, value);
  } else if (rhs.IsDoubleStackSlot()) {
    __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
  } else {
    __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
  }
}

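// Compute the address of an array element: a constant index is folded into the
// displacement, otherwise the index register is combined with the scale and data offset.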
Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
                                          Location index,
                                          ScaleFactor scale,
                                          uint32_t data_offset) {
  return index.IsConstant() ?
      Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
      Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
}

void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
  DCHECK(dest.IsDoubleStackSlot());
  if (IsInt<32>(value)) {
    // Can move directly as an int32 constant.
    __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
            Immediate(static_cast<int32_t>(value)));
  } else {
    Load64BitValue(CpuRegister(TMP), value);
    __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
  }
}

/**
 * Class to handle late fixup of offsets into constant area.
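 * The RIP-relative operand built around such a fixup carries a placeholder 32-bit
 * displacement; Process() patches it once the position of the constant area is known
 * (see the Literal*Address() helpers below).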
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};

/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the target offsets.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};

void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables.
    for (auto jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}

Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
  return Address::RIP(fixup);
}


// TODO: trg as memory.
void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
  if (trg.Equals(return_loc)) {
    return;
  }

  // Let the parallel move resolver take care of all of this.
  HParallelMove parallel_move(GetGraph()->GetArena());
  parallel_move.AddMove(return_loc, trg, type, nullptr);
  GetMoveResolver()->EmitNativeCode(&parallel_move);
}

Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
  // Create a fixup to be used to create and address the jump table.
  JumpTableRIPFixup* table_fixup =
      new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);

  // We have to populate the jump tables.
  fixups_to_jump_tables_.push_back(table_fixup);
  return Address::RIP(table_fixup);
}

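// Store a 64-bit immediate to memory: a single movq when the value fits in a
// sign-extended 32-bit immediate, otherwise two 32-bit stores. The implicit null
// check, if any, is recorded on the first store, which is the access that can fault.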
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Didn't fit in a sign-extended 32-bit immediate. Do it in two 32-bit pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}

#undef __

}  // namespace x86_64
}  // namespace art