/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

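// C2 condition bit (bit 10) of the x87 FPU status word, tested by the
// fprem-based floating-point remainder loop.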
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
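    // Divisor is -1: the quotient is the negated dividend and the remainder is
    // always 0.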
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    x86_64_codegen->InvokeRuntime(entry_point_offset,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCode(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT

inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
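// Note: ucomiss/ucomisd set ZF/CF the way an unsigned integer compare would,
// so the floating-point conditions map onto the unsigned variants (below/above).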
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
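      // The displacement used here is a dummy 32-bit value; the label bound
      // below records its position so the real PC-relative dex cache array
      // offset can be patched in at link time.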
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
  type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
  __ Bind(&type_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
  for (const TypePatchInfo<Label>& info : type_patches_) {
    // These are always PC-relative, see GetSupportedLoadClassKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset,
                                                             &info.dex_file,
                                                             info.label.Position(),
                                                             info.type_index));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

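// Note: only the low 64 bits of each XMM register are spilled and restored
// here (movsd), matching the 8-byte slot size returned below.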
size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

Calin Juravle175dc732015-08-25 15:42:32 +01001022void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1023 HInstruction* instruction,
1024 uint32_t dex_pc,
1025 SlowPathCode* slow_path) {
1026 InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
1027 instruction,
1028 dex_pc,
1029 slow_path);
1030}
1031
1032void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
Alexandre Rames8158f282015-08-07 10:26:17 +01001033 HInstruction* instruction,
1034 uint32_t dex_pc,
1035 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001036 ValidateInvokeRuntime(instruction, slow_path);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001037 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
Alexandre Rames8158f282015-08-07 10:26:17 +01001038 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames8158f282015-08-07 10:26:17 +01001039}
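
// The call above goes through the current Thread's entrypoint table. On
// x86-64 the Thread* is reachable through the gs segment, so the emitted
// sequence is roughly
//     call gs:[entry_point_offset]   // indirect call into the runtime
// followed by the stack map recorded via RecordPcInfo, which lets the runtime
// walk this frame from inside the entrypoint. ValidateInvokeRuntime only
// checks that the instruction/slow-path pairing is allowed to reach here.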
1040
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001041static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001042// Use a fake return address register to mimic Quick.
1043static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
Mark Mendellfb8d2792015-03-31 22:16:59 -04001044CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001045 const X86_64InstructionSetFeatures& isa_features,
1046 const CompilerOptions& compiler_options,
1047 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +00001048 : CodeGenerator(graph,
1049 kNumberOfCpuRegisters,
1050 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001051 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001052 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1053 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001054 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001055 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1056 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001057 compiler_options,
1058 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001059 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001060 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001061 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -04001062 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001063 assembler_(graph->GetArena()),
Mark Mendellf55c3e02015-03-26 21:07:46 -04001064 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +00001065 constant_area_start_(0),
Vladimir Marko5233f932015-09-29 19:01:15 +01001066 method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1067 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0f7dca42015-11-02 14:36:43 +00001068 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001069 simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1070 string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001071 type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Mark Mendell9c86b482015-09-18 13:36:07 -04001072 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001073 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
1074}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001075
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001076InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1077 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001078 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001079 assembler_(codegen->GetAssembler()),
1080 codegen_(codegen) {}
1081
David Brazdil58282f42016-01-14 12:45:10 +00001082void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001083 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001084 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001085
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001086 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001087 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001088}
1089
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001090static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001091 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001092}
David Srbecky9d8606d2015-04-12 09:35:32 +01001093
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001094static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001095 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001096}
1097
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001098void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001099 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001100 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001101 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001102 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001103 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001104
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001105 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001106 __ testq(CpuRegister(RAX), Address(
1107 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001108 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001109 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001110
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001111 if (HasEmptyFrame()) {
1112 return;
1113 }
1114
Nicolas Geoffray98893962015-01-21 12:32:32 +00001115 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001116 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001117 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001118 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001119 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1120 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001121 }
1122 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001123
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001124 int adjust = GetFrameSize() - GetCoreSpillSize();
1125 __ subq(CpuRegister(RSP), Immediate(adjust));
1126 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001127 uint32_t xmm_spill_location = GetFpuSpillStart();
1128 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001129
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001130 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1131 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001132 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1133 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1134 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001135 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001136 }
1137
Mathieu Chartiere401d142015-04-22 13:56:20 -07001138 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001139 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001140}
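
// After the prologue above, a method that needs a frame looks roughly like
// this (a sketch; the exact slot assignment comes from the register allocator):
//     return address                        <- pushed by the caller's call
//     allocated core callee-saves           <- one pushq per register
//     XMM callee-saves and spill slots       \  covered by the single
//     outgoing argument area                  > subq adjustment
//     ArtMethod* at kCurrentMethodStackOffset (copied from kMethodRegisterArgument)
// The overflow check is implicit: the initial testq touches an address
// GetStackOverflowReservedBytes below RSP and relies on the guard page to
// fault, so no explicit compare-and-branch is emitted.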
1141
1142void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001143 __ cfi().RememberState();
1144 if (!HasEmptyFrame()) {
1145 uint32_t xmm_spill_location = GetFpuSpillStart();
1146 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1147 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1148 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1149 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1150 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1151 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1152 }
1153 }
1154
1155 int adjust = GetFrameSize() - GetCoreSpillSize();
1156 __ addq(CpuRegister(RSP), Immediate(adjust));
1157 __ cfi().AdjustCFAOffset(-adjust);
1158
1159 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1160 Register reg = kCoreCalleeSaves[i];
1161 if (allocated_registers_.ContainsCoreRegister(reg)) {
1162 __ popq(CpuRegister(reg));
1163 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1164 __ cfi().Restore(DWARFReg(reg));
1165 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001166 }
1167 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001168 __ ret();
1169 __ cfi().RestoreState();
1170 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001171}
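
// The epilogue is bracketed by cfi().RememberState()/RestoreState() because
// the return is not necessarily the last code emitted for the method: blocks
// placed after it must still unwind with the fully built frame, so the CFA
// adjustments made while popping are dropped once the ret has been emitted.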
1172
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001173void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1174 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001175}
1176
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001177void CodeGeneratorX86_64::Move(Location destination, Location source) {
1178 if (source.Equals(destination)) {
1179 return;
1180 }
1181 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001182 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001183 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001184 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001185 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001186 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001187 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001188 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1189 } else if (source.IsConstant()) {
1190 HConstant* constant = source.GetConstant();
1191 if (constant->IsLongConstant()) {
1192 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1193 } else {
1194 Load32BitValue(dest, GetInt32ValueOf(constant));
1195 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001196 } else {
1197 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001198 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001199 }
1200 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001201 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001202 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001203 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001204 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001205 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1206 } else if (source.IsConstant()) {
1207 HConstant* constant = source.GetConstant();
1208 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1209 if (constant->IsFloatConstant()) {
1210 Load32BitValue(dest, static_cast<int32_t>(value));
1211 } else {
1212 Load64BitValue(dest, value);
1213 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001214 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001215 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001216 } else {
1217 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001218 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001219 }
1220 } else if (destination.IsStackSlot()) {
1221 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001222 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001223 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001224 } else if (source.IsFpuRegister()) {
1225 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001226 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001227 } else if (source.IsConstant()) {
1228 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001229 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001230 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001231 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001232 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001233 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1234 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001235 }
1236 } else {
1237 DCHECK(destination.IsDoubleStackSlot());
1238 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001239 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001240 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001241 } else if (source.IsFpuRegister()) {
1242 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001243 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001244 } else if (source.IsConstant()) {
1245 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001246 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001247 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001248 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001249 } else {
1250 DCHECK(constant->IsLongConstant());
1251 value = constant->AsLongConstant()->GetValue();
1252 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001253 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001254 } else {
1255 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001256 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1257 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001258 }
1259 }
1260}
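
// x86-64 has no memory-to-memory move, so the stack-to-stack cases above
// bounce through the scratch register TMP (blocked from allocation in
// SetupBlockedRegisters). Constants headed for a register go through
// Load32BitValue/Load64BitValue, which may use xorl for zero and therefore
// clobber EFLAGS; this is why flag-carrying code cannot have moves scheduled
// between a condition and its branch.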
1261
Calin Juravle175dc732015-08-25 15:42:32 +01001262void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1263 DCHECK(location.IsRegister());
1264 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1265}
1266
Calin Juravlee460d1d2015-09-29 04:52:17 +01001267void CodeGeneratorX86_64::MoveLocation(
1268 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1269 Move(dst, src);
1270}
1271
1272void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1273 if (location.IsRegister()) {
1274 locations->AddTemp(location);
1275 } else {
1276 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1277 }
1278}
1279
David Brazdilfc6a86a2015-06-26 10:33:45 +00001280void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001281 DCHECK(!successor->IsExitBlock());
1282
1283 HBasicBlock* block = got->GetBlock();
1284 HInstruction* previous = got->GetPrevious();
1285
1286 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001287 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001288 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1289 return;
1290 }
1291
1292 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1293 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1294 }
1295 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001296 __ jmp(codegen_->GetLabelOf(successor));
1297 }
1298}
1299
David Brazdilfc6a86a2015-06-26 10:33:45 +00001300void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1301 got->SetLocations(nullptr);
1302}
1303
1304void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1305 HandleGoto(got, got->GetSuccessor());
1306}
1307
1308void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1309 try_boundary->SetLocations(nullptr);
1310}
1311
1312void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1313 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1314 if (!successor->IsExitBlock()) {
1315 HandleGoto(try_boundary, successor);
1316 }
1317}
1318
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001319void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1320 exit->SetLocations(nullptr);
1321}
1322
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001323void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001324}
1325
Mark Mendell152408f2015-12-31 12:28:50 -05001326template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001327void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001328 LabelType* true_label,
1329 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001330 if (cond->IsFPConditionTrueIfNaN()) {
1331 __ j(kUnordered, true_label);
1332 } else if (cond->IsFPConditionFalseIfNaN()) {
1333 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001334 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001335 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001336}
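
// ucomiss/ucomisd report a NaN operand as "unordered" (ZF, PF and CF all set),
// so NaN has to be routed explicitly before the ordinary condition is tested:
// conditions that Java defines as true for NaN jump straight to the true
// label, those defined as false for NaN jump to the false label, and only
// then is the regular FP condition code checked.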
1337
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001338void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001339 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001340
Mark Mendellc4701932015-04-10 13:18:51 -04001341 Location left = locations->InAt(0);
1342 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001343 Primitive::Type type = condition->InputAt(0)->GetType();
1344 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001345 case Primitive::kPrimBoolean:
1346 case Primitive::kPrimByte:
1347 case Primitive::kPrimChar:
1348 case Primitive::kPrimShort:
1349 case Primitive::kPrimInt:
1350 case Primitive::kPrimNot: {
1351 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1352 if (right.IsConstant()) {
1353 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1354 if (value == 0) {
1355 __ testl(left_reg, left_reg);
1356 } else {
1357 __ cmpl(left_reg, Immediate(value));
1358 }
1359 } else if (right.IsStackSlot()) {
1360 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1361 } else {
1362 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1363 }
1364 break;
1365 }
Mark Mendellc4701932015-04-10 13:18:51 -04001366 case Primitive::kPrimLong: {
1367 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1368 if (right.IsConstant()) {
1369 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001370 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001371 } else if (right.IsDoubleStackSlot()) {
1372 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1373 } else {
1374 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1375 }
Mark Mendellc4701932015-04-10 13:18:51 -04001376 break;
1377 }
1378 case Primitive::kPrimFloat: {
1379 if (right.IsFpuRegister()) {
1380 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1381 } else if (right.IsConstant()) {
1382 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1383 codegen_->LiteralFloatAddress(
1384 right.GetConstant()->AsFloatConstant()->GetValue()));
1385 } else {
1386 DCHECK(right.IsStackSlot());
1387 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1388 Address(CpuRegister(RSP), right.GetStackIndex()));
1389 }
Mark Mendellc4701932015-04-10 13:18:51 -04001390 break;
1391 }
1392 case Primitive::kPrimDouble: {
1393 if (right.IsFpuRegister()) {
1394 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1395 } else if (right.IsConstant()) {
1396 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1397 codegen_->LiteralDoubleAddress(
1398 right.GetConstant()->AsDoubleConstant()->GetValue()));
1399 } else {
1400 DCHECK(right.IsDoubleStackSlot());
1401 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1402 Address(CpuRegister(RSP), right.GetStackIndex()));
1403 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001404 break;
1405 }
1406 default:
1407 LOG(FATAL) << "Unexpected condition type " << type;
1408 }
1409}
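
// Two small encodings worth noting in the comparison above: a compare against
// constant zero is emitted as testl reg, reg (same flags as cmpl, shorter
// encoding), and a 64-bit immediate that does not fit in an int32 is
// presumably staged through the TMP register by Compare64BitValue before the
// cmpq.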
1410
1411template<class LabelType>
1412void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1413 LabelType* true_target_in,
1414 LabelType* false_target_in) {
1415 // Generated branching requires both targets to be explicit. If either of the
1416  // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1417 LabelType fallthrough_target;
1418 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1419 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1420
1421 // Generate the comparison to set the CC.
1422 GenerateCompareTest(condition);
1423
1424 // Now generate the correct jump(s).
1425 Primitive::Type type = condition->InputAt(0)->GetType();
1426 switch (type) {
1427 case Primitive::kPrimLong: {
1428 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1429 break;
1430 }
1431 case Primitive::kPrimFloat: {
1432 GenerateFPJumps(condition, true_target, false_target);
1433 break;
1434 }
1435 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001436 GenerateFPJumps(condition, true_target, false_target);
1437 break;
1438 }
1439 default:
1440 LOG(FATAL) << "Unexpected condition type " << type;
1441 }
1442
David Brazdil0debae72015-11-12 18:37:00 +00001443 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001444 __ jmp(false_target);
1445 }
David Brazdil0debae72015-11-12 18:37:00 +00001446
1447 if (fallthrough_target.IsLinked()) {
1448 __ Bind(&fallthrough_target);
1449 }
Mark Mendellc4701932015-04-10 13:18:51 -04001450}
1451
David Brazdil0debae72015-11-12 18:37:00 +00001452static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1453 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1454 // are set only strictly before `branch`. We can't use the eflags on long
1455 // conditions if they are materialized due to the complex branching.
1456 return cond->IsCondition() &&
1457 cond->GetNext() == branch &&
1458 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1459}
1460
Mark Mendell152408f2015-12-31 12:28:50 -05001461template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001462void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001463 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001464 LabelType* true_target,
1465 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001466 HInstruction* cond = instruction->InputAt(condition_input_index);
1467
1468 if (true_target == nullptr && false_target == nullptr) {
1469 // Nothing to do. The code always falls through.
1470 return;
1471 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001472 // Constant condition, statically compared against "true" (integer value 1).
1473 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001474 if (true_target != nullptr) {
1475 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001476 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001477 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001478 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001479 if (false_target != nullptr) {
1480 __ jmp(false_target);
1481 }
1482 }
1483 return;
1484 }
1485
1486 // The following code generates these patterns:
1487 // (1) true_target == nullptr && false_target != nullptr
1488 // - opposite condition true => branch to false_target
1489 // (2) true_target != nullptr && false_target == nullptr
1490 // - condition true => branch to true_target
1491 // (3) true_target != nullptr && false_target != nullptr
1492 // - condition true => branch to true_target
1493 // - branch to false_target
1494 if (IsBooleanValueOrMaterializedCondition(cond)) {
1495 if (AreEflagsSetFrom(cond, instruction)) {
1496 if (true_target == nullptr) {
1497 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1498 } else {
1499 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1500 }
1501 } else {
1502 // Materialized condition, compare against 0.
1503 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1504 if (lhs.IsRegister()) {
1505 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1506 } else {
1507 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1508 }
1509 if (true_target == nullptr) {
1510 __ j(kEqual, false_target);
1511 } else {
1512 __ j(kNotEqual, true_target);
1513 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001514 }
1515 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001516 // Condition has not been materialized, use its inputs as the
1517 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001518 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001519
David Brazdil0debae72015-11-12 18:37:00 +00001520 // If this is a long or FP comparison that has been folded into
1521 // the HCondition, generate the comparison directly.
1522 Primitive::Type type = condition->InputAt(0)->GetType();
1523 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1524 GenerateCompareTestAndBranch(condition, true_target, false_target);
1525 return;
1526 }
1527
1528 Location lhs = condition->GetLocations()->InAt(0);
1529 Location rhs = condition->GetLocations()->InAt(1);
1530 if (rhs.IsRegister()) {
1531 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1532 } else if (rhs.IsConstant()) {
1533 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001534 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001535 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001536 __ cmpl(lhs.AsRegister<CpuRegister>(),
1537 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1538 }
1539 if (true_target == nullptr) {
1540 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1541 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001542 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001543 }
Dave Allison20dfc792014-06-16 20:44:29 -07001544 }
David Brazdil0debae72015-11-12 18:37:00 +00001545
1546 // If neither branch falls through (case 3), the conditional branch to `true_target`
1547 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1548 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001549 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001550 }
1551}
1552
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001553void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001554 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1555 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001556 locations->SetInAt(0, Location::Any());
1557 }
1558}
1559
1560void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001561 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1562 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1563 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1564 nullptr : codegen_->GetLabelOf(true_successor);
1565 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1566 nullptr : codegen_->GetLabelOf(false_successor);
1567 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001568}
1569
1570void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1571 LocationSummary* locations = new (GetGraph()->GetArena())
1572 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001573 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001574 locations->SetInAt(0, Location::Any());
1575 }
1576}
1577
1578void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001579 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001580 GenerateTestAndBranch<Label>(deoptimize,
1581 /* condition_input_index */ 0,
1582 slow_path->GetEntryLabel(),
1583 /* false_target */ nullptr);
1584}
1585
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001586static bool SelectCanUseCMOV(HSelect* select) {
1587 // There are no conditional move instructions for XMMs.
1588 if (Primitive::IsFloatingPointType(select->GetType())) {
1589 return false;
1590 }
1591
1592 // A FP condition doesn't generate the single CC that we need.
1593 HInstruction* condition = select->GetCondition();
1594 if (condition->IsCondition() &&
1595 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1596 return false;
1597 }
1598
1599 // We can generate a CMOV for this Select.
1600 return true;
1601}
1602
David Brazdil74eb1b22015-12-14 11:44:01 +00001603void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1604 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1605 if (Primitive::IsFloatingPointType(select->GetType())) {
1606 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001607 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001608 } else {
1609 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001610 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001611 if (select->InputAt(1)->IsConstant()) {
1612 locations->SetInAt(1, Location::RequiresRegister());
1613 } else {
1614 locations->SetInAt(1, Location::Any());
1615 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001616 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001617 locations->SetInAt(1, Location::Any());
1618 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001619 }
1620 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1621 locations->SetInAt(2, Location::RequiresRegister());
1622 }
1623 locations->SetOut(Location::SameAsFirstInput());
1624}
1625
1626void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1627 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001628 if (SelectCanUseCMOV(select)) {
1629 // If both the condition and the source types are integer, we can generate
1630 // a CMOV to implement Select.
1631 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001632 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001633 DCHECK(locations->InAt(0).Equals(locations->Out()));
1634
1635 HInstruction* select_condition = select->GetCondition();
1636 Condition cond = kNotEqual;
1637
1638 // Figure out how to test the 'condition'.
1639 if (select_condition->IsCondition()) {
1640 HCondition* condition = select_condition->AsCondition();
1641 if (!condition->IsEmittedAtUseSite()) {
1642 // This was a previously materialized condition.
1643 // Can we use the existing condition code?
1644 if (AreEflagsSetFrom(condition, select)) {
1645 // Materialization was the previous instruction. Condition codes are right.
1646 cond = X86_64IntegerCondition(condition->GetCondition());
1647 } else {
1648 // No, we have to recreate the condition code.
1649 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1650 __ testl(cond_reg, cond_reg);
1651 }
1652 } else {
1653 GenerateCompareTest(condition);
1654 cond = X86_64IntegerCondition(condition->GetCondition());
1655 }
1656 } else {
1657 // Must be a boolean condition, which needs to be compared to 0.
1658 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1659 __ testl(cond_reg, cond_reg);
1660 }
1661
1662 // If the condition is true, overwrite the output, which already contains false.
1663 // Generate the correct sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001664 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1665 if (value_true_loc.IsRegister()) {
1666 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1667 } else {
1668 __ cmov(cond,
1669 value_false,
1670 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1671 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001672 } else {
1673 NearLabel false_target;
1674 GenerateTestAndBranch<NearLabel>(select,
1675 /* condition_input_index */ 2,
1676 /* true_target */ nullptr,
1677 &false_target);
1678 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1679 __ Bind(&false_target);
1680 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001681}
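
// When CMOV is usable the select becomes straight-line code, roughly:
//     testl cond, cond            // or reuse/recreate the condition flags
//     cmovne out, value_true      // out already holds value_false (SameAsFirstInput)
// with the 64-bit form used when the select produces a long. Otherwise
// GenerateTestAndBranch emits a branch that skips the move of the true value,
// which is the path FP selects and FP-compared conditions take.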
1682
David Srbecky0cf44932015-12-09 14:09:59 +00001683void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1684 new (GetGraph()->GetArena()) LocationSummary(info);
1685}
1686
David Srbeckyd28f4a02016-03-14 17:14:24 +00001687void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1688 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001689}
1690
1691void CodeGeneratorX86_64::GenerateNop() {
1692 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001693}
1694
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001695void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001696 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001697 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001698 // Handle the long/FP comparisons made in instruction simplification.
1699 switch (cond->InputAt(0)->GetType()) {
1700 case Primitive::kPrimLong:
1701 locations->SetInAt(0, Location::RequiresRegister());
1702 locations->SetInAt(1, Location::Any());
1703 break;
1704 case Primitive::kPrimFloat:
1705 case Primitive::kPrimDouble:
1706 locations->SetInAt(0, Location::RequiresFpuRegister());
1707 locations->SetInAt(1, Location::Any());
1708 break;
1709 default:
1710 locations->SetInAt(0, Location::RequiresRegister());
1711 locations->SetInAt(1, Location::Any());
1712 break;
1713 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001714 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001715 locations->SetOut(Location::RequiresRegister());
1716 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001717}
1718
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001719void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001720 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001721 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001722 }
Mark Mendellc4701932015-04-10 13:18:51 -04001723
1724 LocationSummary* locations = cond->GetLocations();
1725 Location lhs = locations->InAt(0);
1726 Location rhs = locations->InAt(1);
1727 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001728 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001729
1730 switch (cond->InputAt(0)->GetType()) {
1731 default:
1732 // Integer case.
1733
1734 // Clear output register: setcc only sets the low byte.
1735 __ xorl(reg, reg);
1736
1737 if (rhs.IsRegister()) {
1738 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1739 } else if (rhs.IsConstant()) {
1740 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001741 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001742 } else {
1743 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1744 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001745 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001746 return;
1747 case Primitive::kPrimLong:
1748 // Clear output register: setcc only sets the low byte.
1749 __ xorl(reg, reg);
1750
1751 if (rhs.IsRegister()) {
1752 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1753 } else if (rhs.IsConstant()) {
1754 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001755 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001756 } else {
1757 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1758 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001759 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001760 return;
1761 case Primitive::kPrimFloat: {
1762 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1763 if (rhs.IsConstant()) {
1764 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1765 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1766 } else if (rhs.IsStackSlot()) {
1767 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1768 } else {
1769 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1770 }
1771 GenerateFPJumps(cond, &true_label, &false_label);
1772 break;
1773 }
1774 case Primitive::kPrimDouble: {
1775 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1776 if (rhs.IsConstant()) {
1777 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1778 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1779 } else if (rhs.IsDoubleStackSlot()) {
1780 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1781 } else {
1782 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1783 }
1784 GenerateFPJumps(cond, &true_label, &false_label);
1785 break;
1786 }
1787 }
1788
1789 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001790 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001791
Roland Levillain4fa13f62015-07-06 18:11:54 +01001792 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001793 __ Bind(&false_label);
1794 __ xorl(reg, reg);
1795 __ jmp(&done_label);
1796
Roland Levillain4fa13f62015-07-06 18:11:54 +01001797 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001798 __ Bind(&true_label);
1799 __ movl(reg, Immediate(1));
1800 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001801}
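
// For integer and long inputs the materialized condition is branch-free,
// roughly:
//     xorl reg, reg          // must precede the compare: xorl clobbers EFLAGS
//     cmpl/cmpq lhs, rhs
//     setcc <cond>, reg      // setcc writes only the low byte, hence the xorl
// FP inputs cannot be expressed as a single condition code, so they reuse the
// jump-based scheme and converge on done_label with an explicit 0 or 1.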
1802
1803void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001804 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001805}
1806
1807void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001808 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001809}
1810
1811void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001812 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001813}
1814
1815void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001816 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001817}
1818
1819void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001820 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001821}
1822
1823void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001824 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001825}
1826
1827void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001828 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001829}
1830
1831void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001832 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001833}
1834
1835void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001836 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001837}
1838
1839void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001840 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001841}
1842
1843void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001844 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001845}
1846
1847void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001848 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001849}
1850
Aart Bike9f37602015-10-09 11:15:55 -07001851void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001852 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001853}
1854
1855void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001856 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001857}
1858
1859void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001860 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001861}
1862
1863void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001864 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001865}
1866
1867void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001868 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001869}
1870
1871void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001872 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001873}
1874
1875void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001876 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001877}
1878
1879void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001880 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001881}
1882
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001883void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001884 LocationSummary* locations =
1885 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001886 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001887 case Primitive::kPrimBoolean:
1888 case Primitive::kPrimByte:
1889 case Primitive::kPrimShort:
1890 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001891 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001892 case Primitive::kPrimLong: {
1893 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001894 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001895 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1896 break;
1897 }
1898 case Primitive::kPrimFloat:
1899 case Primitive::kPrimDouble: {
1900 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001901 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001902 locations->SetOut(Location::RequiresRegister());
1903 break;
1904 }
1905 default:
1906 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1907 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001908}
1909
1910void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001911 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001912 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001913 Location left = locations->InAt(0);
1914 Location right = locations->InAt(1);
1915
Mark Mendell0c9497d2015-08-21 09:30:05 -04001916 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001917 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001918 Condition less_cond = kLess;
1919
Calin Juravleddb7df22014-11-25 20:56:51 +00001920 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001921 case Primitive::kPrimBoolean:
1922 case Primitive::kPrimByte:
1923 case Primitive::kPrimShort:
1924 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001925 case Primitive::kPrimInt: {
1926 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1927 if (right.IsConstant()) {
1928 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1929 codegen_->Compare32BitValue(left_reg, value);
1930 } else if (right.IsStackSlot()) {
1931 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1932 } else {
1933 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1934 }
1935 break;
1936 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001937 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001938 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1939 if (right.IsConstant()) {
1940 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001941 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001942 } else if (right.IsDoubleStackSlot()) {
1943 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001944 } else {
1945 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1946 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001947 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001948 }
1949 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001950 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1951 if (right.IsConstant()) {
1952 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1953 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1954 } else if (right.IsStackSlot()) {
1955 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1956 } else {
1957 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1958 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001959 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001960 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001961 break;
1962 }
1963 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001964 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1965 if (right.IsConstant()) {
1966 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1967 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1968 } else if (right.IsDoubleStackSlot()) {
1969 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1970 } else {
1971 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1972 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001973 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001974 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001975 break;
1976 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001977 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001978 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001979 }
Aart Bika19616e2016-02-01 18:57:58 -08001980
Calin Juravleddb7df22014-11-25 20:56:51 +00001981 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001982 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001983 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001984
Calin Juravle91debbc2014-11-26 19:01:09 +00001985 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001986 __ movl(out, Immediate(1));
1987 __ jmp(&done);
1988
1989 __ Bind(&less);
1990 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001991
1992 __ Bind(&done);
1993}
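
// The value produced above follows the Java compare contract: 0 when equal,
// 1 when greater, -1 when less. A NaN operand lands on whichever side the
// bias of the bytecode selects (gt-bias jumps to greater, lt-bias to less),
// and the FP "less" test uses kBelow because ucomiss/ucomisd report
// less-than through CF rather than through the signed flags.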
1994
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001995void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001996 LocationSummary* locations =
1997 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001998 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001999}
2000
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002001void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002002 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002003}
2004
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002005void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
2006 LocationSummary* locations =
2007 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2008 locations->SetOut(Location::ConstantLocation(constant));
2009}
2010
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002011void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002012 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002013}
2014
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002015void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002016 LocationSummary* locations =
2017 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002018 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002019}
2020
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002021void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002022 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002023}
2024
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002025void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2026 LocationSummary* locations =
2027 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2028 locations->SetOut(Location::ConstantLocation(constant));
2029}
2030
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002031void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002032 // Will be generated at use site.
2033}
2034
2035void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2036 LocationSummary* locations =
2037 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2038 locations->SetOut(Location::ConstantLocation(constant));
2039}
2040
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002041void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
2042 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002043 // Will be generated at use site.
2044}
2045
Calin Juravle27df7582015-04-17 19:12:31 +01002046void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2047 memory_barrier->SetLocations(nullptr);
2048}
2049
2050void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002051 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002052}
2053
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002054void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2055 ret->SetLocations(nullptr);
2056}
2057
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002058void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002059 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002060}
2061
2062void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002063 LocationSummary* locations =
2064 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002065 switch (ret->InputAt(0)->GetType()) {
2066 case Primitive::kPrimBoolean:
2067 case Primitive::kPrimByte:
2068 case Primitive::kPrimChar:
2069 case Primitive::kPrimShort:
2070 case Primitive::kPrimInt:
2071 case Primitive::kPrimNot:
2072 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002073 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002074 break;
2075
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002076 case Primitive::kPrimFloat:
2077 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002078 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002079 break;
2080
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002081 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002082 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002083 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002084}
2085
2086void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2087 if (kIsDebugBuild) {
2088 switch (ret->InputAt(0)->GetType()) {
2089 case Primitive::kPrimBoolean:
2090 case Primitive::kPrimByte:
2091 case Primitive::kPrimChar:
2092 case Primitive::kPrimShort:
2093 case Primitive::kPrimInt:
2094 case Primitive::kPrimNot:
2095 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002096 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002097 break;
2098
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002099 case Primitive::kPrimFloat:
2100 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002101 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002102 XMM0);
2103 break;
2104
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002105 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002106 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002107 }
2108 }
2109 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002110}
2111
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002112Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2113 switch (type) {
2114 case Primitive::kPrimBoolean:
2115 case Primitive::kPrimByte:
2116 case Primitive::kPrimChar:
2117 case Primitive::kPrimShort:
2118 case Primitive::kPrimInt:
2119 case Primitive::kPrimNot:
2120 case Primitive::kPrimLong:
2121 return Location::RegisterLocation(RAX);
2122
2123 case Primitive::kPrimVoid:
2124 return Location::NoLocation();
2125
2126 case Primitive::kPrimDouble:
2127 case Primitive::kPrimFloat:
2128 return Location::FpuRegisterLocation(XMM0);
2129 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002130
2131 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002132}
2133
2134Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2135 return Location::RegisterLocation(kMethodRegisterArgument);
2136}
2137
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002138Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002139 switch (type) {
2140 case Primitive::kPrimBoolean:
2141 case Primitive::kPrimByte:
2142 case Primitive::kPrimChar:
2143 case Primitive::kPrimShort:
2144 case Primitive::kPrimInt:
2145 case Primitive::kPrimNot: {
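      // One GP register while argument registers remain, otherwise one 32-bit
      // stack slot; the stack index is advanced in both cases.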
2146 uint32_t index = gp_index_++;
2147 stack_index_++;
2148 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002149 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002150 } else {
2151 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2152 }
2153 }
2154
2155 case Primitive::kPrimLong: {
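      // A long takes a single 64-bit GP register when one is available,
      // otherwise a two-slot (double) stack slot; the stack index advances by
      // two either way.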
2156 uint32_t index = gp_index_;
2157 stack_index_ += 2;
2158 if (index < calling_convention.GetNumberOfRegisters()) {
2159 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002160 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002161 } else {
2162 gp_index_ += 2;
2163 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2164 }
2165 }
2166
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002167 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002168 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002169 stack_index_++;
2170 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002171 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002172 } else {
2173 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2174 }
2175 }
2176
2177 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002178 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002179 stack_index_ += 2;
2180 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002181 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002182 } else {
2183 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2184 }
2185 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002186
2187 case Primitive::kPrimVoid:
2188 LOG(FATAL) << "Unexpected parameter type " << type;
2189 break;
2190 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002191 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002192}
2193
Calin Juravle175dc732015-08-25 15:42:32 +01002194void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2195 // The trampoline uses the same calling convention as dex calling conventions,
2196 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
2197 // the method_idx.
2198 HandleInvoke(invoke);
2199}
2200
2201void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2202 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2203}
2204
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002205void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002206 // Explicit clinit checks triggered by static invokes must have been pruned by
2207 // art::PrepareForRegisterAllocation.
2208 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002209
Mark Mendellfb8d2792015-03-31 22:16:59 -04002210 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002211 if (intrinsic.TryDispatch(invoke)) {
2212 return;
2213 }
2214
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002215 HandleInvoke(invoke);
2216}
2217
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002218static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2219 if (invoke->GetLocations()->Intrinsified()) {
2220 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2221 intrinsic.Dispatch(invoke);
2222 return true;
2223 }
2224 return false;
2225}
2226
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002227void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002228 // Explicit clinit checks triggered by static invokes must have been pruned by
2229 // art::PrepareForRegisterAllocation.
2230 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002231
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002232 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2233 return;
2234 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002235
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002236 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002237 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002238 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002239 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002240}
2241
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002242void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002243 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002244 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002245}
2246
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002247void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002248 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002249 if (intrinsic.TryDispatch(invoke)) {
2250 return;
2251 }
2252
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002253 HandleInvoke(invoke);
2254}
2255
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002256void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002257 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2258 return;
2259 }
2260
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002261 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002262 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002263 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002264}
2265
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002266void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2267 HandleInvoke(invoke);
2268 // Add the hidden argument.
2269 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2270}
2271
2272void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2273 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002274 LocationSummary* locations = invoke->GetLocations();
2275 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2276 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002277 Location receiver = locations->InAt(0);
2278 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2279
Roland Levillain0d5a2812015-11-13 10:07:31 +00002280 // Set the hidden argument. This is safe to do here, as RAX
2281 // won't be modified thereafter, before the `call` instruction.
2282 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002283 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002284
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002285 if (receiver.IsStackSlot()) {
2286 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002287 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002288 __ movl(temp, Address(temp, class_offset));
2289 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002290 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002291 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002292 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002293 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002294 // Instead of simply (possibly) unpoisoning `temp` here, we should
2295 // emit a read barrier for the previous class reference load.
2296 // However this is not required in practice, as this is an
2297 // intermediate/temporary reference and because the current
2298 // concurrent copying collector keeps the from-space memory
2299 // intact/accessible until the end of the marking phase (the
2300 // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002301 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002302 // temp = temp->GetAddressOfIMT()
2303 __ movq(temp,
2304 Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
2305 // Compute the offset of the IMT slot for this method (the IMT index is taken modulo the IMT size).
2306 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
2307 invoke->GetImtIndex() % ImTable::kSize, kX86_64PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002308 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002309 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002310 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002311 __ call(Address(temp,
2312 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002313
2314 DCHECK(!codegen_->IsLeafMethod());
2315 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2316}
2317
Roland Levillain88cb1752014-10-20 16:36:47 +01002318void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2319 LocationSummary* locations =
2320 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2321 switch (neg->GetResultType()) {
2322 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002323 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002324 locations->SetInAt(0, Location::RequiresRegister());
2325 locations->SetOut(Location::SameAsFirstInput());
2326 break;
2327
Roland Levillain88cb1752014-10-20 16:36:47 +01002328 case Primitive::kPrimFloat:
2329 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002330 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002331 locations->SetOut(Location::SameAsFirstInput());
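      // The FP temp holds the sign-bit mask loaded from the constant area.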
Roland Levillain5368c212014-11-27 15:03:41 +00002332 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002333 break;
2334
2335 default:
2336 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2337 }
2338}
2339
2340void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2341 LocationSummary* locations = neg->GetLocations();
2342 Location out = locations->Out();
2343 Location in = locations->InAt(0);
2344 switch (neg->GetResultType()) {
2345 case Primitive::kPrimInt:
2346 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002347 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002348 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002349 break;
2350
2351 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002352 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002353 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002354 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002355 break;
2356
Roland Levillain5368c212014-11-27 15:03:41 +00002357 case Primitive::kPrimFloat: {
2358 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002359 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002360 // Implement float negation with an exclusive or with value
2361 // 0x80000000 (mask for bit 31, representing the sign of a
2362 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002363 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002364 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002365 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002366 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002367
Roland Levillain5368c212014-11-27 15:03:41 +00002368 case Primitive::kPrimDouble: {
2369 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002370 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002371 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002372 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002373 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002374 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002375 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002376 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002377 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002378
2379 default:
2380 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2381 }
2382}
2383
Roland Levillaindff1f282014-11-05 14:15:05 +00002384void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2385 LocationSummary* locations =
2386 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2387 Primitive::Type result_type = conversion->GetResultType();
2388 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002389 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002390
David Brazdilb2bd1c52015-03-25 11:17:37 +00002391 // The Java language does not allow treating boolean as an integral type but
2392 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002393
Roland Levillaindff1f282014-11-05 14:15:05 +00002394 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002395 case Primitive::kPrimByte:
2396 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002397 case Primitive::kPrimLong:
2398 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002399 case Primitive::kPrimBoolean:
2400 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002401 case Primitive::kPrimShort:
2402 case Primitive::kPrimInt:
2403 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002404 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002405 locations->SetInAt(0, Location::Any());
2406 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2407 break;
2408
2409 default:
2410 LOG(FATAL) << "Unexpected type conversion from " << input_type
2411 << " to " << result_type;
2412 }
2413 break;
2414
Roland Levillain01a8d712014-11-14 16:27:39 +00002415 case Primitive::kPrimShort:
2416 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002417 case Primitive::kPrimLong:
2418 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002419 case Primitive::kPrimBoolean:
2420 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002421 case Primitive::kPrimByte:
2422 case Primitive::kPrimInt:
2423 case Primitive::kPrimChar:
2424 // Processing a Dex `int-to-short' instruction.
2425 locations->SetInAt(0, Location::Any());
2426 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2427 break;
2428
2429 default:
2430 LOG(FATAL) << "Unexpected type conversion from " << input_type
2431 << " to " << result_type;
2432 }
2433 break;
2434
Roland Levillain946e1432014-11-11 17:35:19 +00002435 case Primitive::kPrimInt:
2436 switch (input_type) {
2437 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002438 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002439 locations->SetInAt(0, Location::Any());
2440 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2441 break;
2442
2443 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002444 // Processing a Dex `float-to-int' instruction.
2445 locations->SetInAt(0, Location::RequiresFpuRegister());
2446 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002447 break;
2448
Roland Levillain946e1432014-11-11 17:35:19 +00002449 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002450 // Processing a Dex `double-to-int' instruction.
2451 locations->SetInAt(0, Location::RequiresFpuRegister());
2452 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002453 break;
2454
2455 default:
2456 LOG(FATAL) << "Unexpected type conversion from " << input_type
2457 << " to " << result_type;
2458 }
2459 break;
2460
Roland Levillaindff1f282014-11-05 14:15:05 +00002461 case Primitive::kPrimLong:
2462 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002463 case Primitive::kPrimBoolean:
2464 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002465 case Primitive::kPrimByte:
2466 case Primitive::kPrimShort:
2467 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002468 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002469 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002470 // TODO: We would benefit from a (to-be-implemented)
2471 // Location::RegisterOrStackSlot requirement for this input.
2472 locations->SetInAt(0, Location::RequiresRegister());
2473 locations->SetOut(Location::RequiresRegister());
2474 break;
2475
2476 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002477 // Processing a Dex `float-to-long' instruction.
2478 locations->SetInAt(0, Location::RequiresFpuRegister());
2479 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002480 break;
2481
Roland Levillaindff1f282014-11-05 14:15:05 +00002482 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002483 // Processing a Dex `double-to-long' instruction.
2484 locations->SetInAt(0, Location::RequiresFpuRegister());
2485 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002486 break;
2487
2488 default:
2489 LOG(FATAL) << "Unexpected type conversion from " << input_type
2490 << " to " << result_type;
2491 }
2492 break;
2493
Roland Levillain981e4542014-11-14 11:47:14 +00002494 case Primitive::kPrimChar:
2495 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002496 case Primitive::kPrimLong:
2497 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002498 case Primitive::kPrimBoolean:
2499 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002500 case Primitive::kPrimByte:
2501 case Primitive::kPrimShort:
2502 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002503 // Processing a Dex `int-to-char' instruction.
2504 locations->SetInAt(0, Location::Any());
2505 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2506 break;
2507
2508 default:
2509 LOG(FATAL) << "Unexpected type conversion from " << input_type
2510 << " to " << result_type;
2511 }
2512 break;
2513
Roland Levillaindff1f282014-11-05 14:15:05 +00002514 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002515 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002516 case Primitive::kPrimBoolean:
2517 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002518 case Primitive::kPrimByte:
2519 case Primitive::kPrimShort:
2520 case Primitive::kPrimInt:
2521 case Primitive::kPrimChar:
2522 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002523 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002524 locations->SetOut(Location::RequiresFpuRegister());
2525 break;
2526
2527 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002528 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002529 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002530 locations->SetOut(Location::RequiresFpuRegister());
2531 break;
2532
Roland Levillaincff13742014-11-17 14:32:17 +00002533 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002534 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002535 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002536 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002537 break;
2538
2539 default:
2540 LOG(FATAL) << "Unexpected type conversion from " << input_type
2541 << " to " << result_type;
2542 }
2543 break;
2544
Roland Levillaindff1f282014-11-05 14:15:05 +00002545 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002546 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002547 case Primitive::kPrimBoolean:
2548 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002549 case Primitive::kPrimByte:
2550 case Primitive::kPrimShort:
2551 case Primitive::kPrimInt:
2552 case Primitive::kPrimChar:
2553 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002554 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002555 locations->SetOut(Location::RequiresFpuRegister());
2556 break;
2557
2558 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002559 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002560 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002561 locations->SetOut(Location::RequiresFpuRegister());
2562 break;
2563
Roland Levillaincff13742014-11-17 14:32:17 +00002564 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002565 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002566 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002567 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002568 break;
2569
2570 default:
2571 LOG(FATAL) << "Unexpected type conversion from " << input_type
2572 << " to " << result_type;
2573 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002574 break;
2575
2576 default:
2577 LOG(FATAL) << "Unexpected type conversion from " << input_type
2578 << " to " << result_type;
2579 }
2580}
2581
2582void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2583 LocationSummary* locations = conversion->GetLocations();
2584 Location out = locations->Out();
2585 Location in = locations->InAt(0);
2586 Primitive::Type result_type = conversion->GetResultType();
2587 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002588 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002589 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002590 case Primitive::kPrimByte:
2591 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002592 case Primitive::kPrimLong:
2593 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002594 case Primitive::kPrimBoolean:
2595 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002596 case Primitive::kPrimShort:
2597 case Primitive::kPrimInt:
2598 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002599 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002600 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002601 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002602 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002603 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002604 Address(CpuRegister(RSP), in.GetStackIndex()));
2605 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002606 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002607 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002608 }
2609 break;
2610
2611 default:
2612 LOG(FATAL) << "Unexpected type conversion from " << input_type
2613 << " to " << result_type;
2614 }
2615 break;
2616
Roland Levillain01a8d712014-11-14 16:27:39 +00002617 case Primitive::kPrimShort:
2618 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002619 case Primitive::kPrimLong:
2620 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002621 case Primitive::kPrimBoolean:
2622 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002623 case Primitive::kPrimByte:
2624 case Primitive::kPrimInt:
2625 case Primitive::kPrimChar:
2626 // Processing a Dex `int-to-short' instruction.
2627 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002628 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002629 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002630 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002631 Address(CpuRegister(RSP), in.GetStackIndex()));
2632 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002633 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002634 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002635 }
2636 break;
2637
2638 default:
2639 LOG(FATAL) << "Unexpected type conversion from " << input_type
2640 << " to " << result_type;
2641 }
2642 break;
2643
Roland Levillain946e1432014-11-11 17:35:19 +00002644 case Primitive::kPrimInt:
2645 switch (input_type) {
2646 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002647 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002648 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002649 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002650 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002651 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002652 Address(CpuRegister(RSP), in.GetStackIndex()));
2653 } else {
2654 DCHECK(in.IsConstant());
2655 DCHECK(in.GetConstant()->IsLongConstant());
2656 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002657 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002658 }
2659 break;
2660
Roland Levillain3f8f9362014-12-02 17:45:01 +00002661 case Primitive::kPrimFloat: {
2662 // Processing a Dex `float-to-int' instruction.
2663 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2664 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002665 NearLabel done, nan;
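          // Java semantics: NaN converts to 0 and values at or above
          // Integer.MAX_VALUE saturate to Integer.MAX_VALUE; cvttss2si already
          // yields Integer.MIN_VALUE (0x80000000) for the remaining
          // out-of-range inputs.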
Roland Levillain3f8f9362014-12-02 17:45:01 +00002666
2667 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002668 // if input >= (float)INT_MAX goto done
2669 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002670 __ j(kAboveEqual, &done);
2671 // if input == NaN goto nan
2672 __ j(kUnordered, &nan);
2673 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002674 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002675 __ jmp(&done);
2676 __ Bind(&nan);
2677 // output = 0
2678 __ xorl(output, output);
2679 __ Bind(&done);
2680 break;
2681 }
2682
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002683 case Primitive::kPrimDouble: {
2684 // Processing a Dex `double-to-int' instruction.
2685 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2686 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002687 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002688
2689 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002690 // if input >= (double)INT_MAX goto done
2691 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002692 __ j(kAboveEqual, &done);
2693 // if input == NaN goto nan
2694 __ j(kUnordered, &nan);
2695 // output = double-to-int-truncate(input)
2696 __ cvttsd2si(output, input);
2697 __ jmp(&done);
2698 __ Bind(&nan);
2699 // output = 0
2700 __ xorl(output, output);
2701 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002702 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002703 }
Roland Levillain946e1432014-11-11 17:35:19 +00002704
2705 default:
2706 LOG(FATAL) << "Unexpected type conversion from " << input_type
2707 << " to " << result_type;
2708 }
2709 break;
2710
Roland Levillaindff1f282014-11-05 14:15:05 +00002711 case Primitive::kPrimLong:
2712 DCHECK(out.IsRegister());
2713 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002714 case Primitive::kPrimBoolean:
2715 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002716 case Primitive::kPrimByte:
2717 case Primitive::kPrimShort:
2718 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002719 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002720 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002721 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002722 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002723 break;
2724
Roland Levillain624279f2014-12-04 11:54:28 +00002725 case Primitive::kPrimFloat: {
2726 // Processing a Dex `float-to-long' instruction.
2727 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2728 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002729 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002730
Mark Mendell92e83bf2015-05-07 11:25:03 -04002731 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002732 // if input >= (float)LONG_MAX goto done
2733 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002734 __ j(kAboveEqual, &done);
2735 // if input == NaN goto nan
2736 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002737 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002738 __ cvttss2si(output, input, true);
2739 __ jmp(&done);
2740 __ Bind(&nan);
2741 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002742 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002743 __ Bind(&done);
2744 break;
2745 }
2746
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002747 case Primitive::kPrimDouble: {
2748 // Processing a Dex `double-to-long' instruction.
2749 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2750 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002751 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002752
Mark Mendell92e83bf2015-05-07 11:25:03 -04002753 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002754 // if input >= (double)LONG_MAX goto done
2755 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002756 __ j(kAboveEqual, &done);
2757 // if input == NaN goto nan
2758 __ j(kUnordered, &nan);
2759 // output = double-to-long-truncate(input)
2760 __ cvttsd2si(output, input, true);
2761 __ jmp(&done);
2762 __ Bind(&nan);
2763 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002764 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002765 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002766 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002767 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002768
2769 default:
2770 LOG(FATAL) << "Unexpected type conversion from " << input_type
2771 << " to " << result_type;
2772 }
2773 break;
2774
Roland Levillain981e4542014-11-14 11:47:14 +00002775 case Primitive::kPrimChar:
2776 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002777 case Primitive::kPrimLong:
2778 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002779 case Primitive::kPrimBoolean:
2780 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002781 case Primitive::kPrimByte:
2782 case Primitive::kPrimShort:
2783 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002784 // Processing a Dex `int-to-char' instruction.
2785 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002786 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002787 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002788 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002789 Address(CpuRegister(RSP), in.GetStackIndex()));
2790 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002791 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002792 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002793 }
2794 break;
2795
2796 default:
2797 LOG(FATAL) << "Unexpected type conversion from " << input_type
2798 << " to " << result_type;
2799 }
2800 break;
2801
Roland Levillaindff1f282014-11-05 14:15:05 +00002802 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002803 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002804 case Primitive::kPrimBoolean:
2805 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002806 case Primitive::kPrimByte:
2807 case Primitive::kPrimShort:
2808 case Primitive::kPrimInt:
2809 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002810 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002811 if (in.IsRegister()) {
2812 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2813 } else if (in.IsConstant()) {
2814 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2815 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002816 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002817 } else {
2818 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2819 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2820 }
Roland Levillaincff13742014-11-17 14:32:17 +00002821 break;
2822
2823 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002824 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002825 if (in.IsRegister()) {
2826 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2827 } else if (in.IsConstant()) {
2828 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2829 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002830 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002831 } else {
2832 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2833 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2834 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002835 break;
2836
Roland Levillaincff13742014-11-17 14:32:17 +00002837 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002838 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002839 if (in.IsFpuRegister()) {
2840 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2841 } else if (in.IsConstant()) {
2842 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2843 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002844 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002845 } else {
2846 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2847 Address(CpuRegister(RSP), in.GetStackIndex()));
2848 }
Roland Levillaincff13742014-11-17 14:32:17 +00002849 break;
2850
2851 default:
2852 LOG(FATAL) << "Unexpected type conversion from " << input_type
2853 << " to " << result_type;
2854 }
2855 break;
2856
Roland Levillaindff1f282014-11-05 14:15:05 +00002857 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002858 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002859 case Primitive::kPrimBoolean:
2860 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002861 case Primitive::kPrimByte:
2862 case Primitive::kPrimShort:
2863 case Primitive::kPrimInt:
2864 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002865 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002866 if (in.IsRegister()) {
2867 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2868 } else if (in.IsConstant()) {
2869 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2870 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002871 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002872 } else {
2873 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2874 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2875 }
Roland Levillaincff13742014-11-17 14:32:17 +00002876 break;
2877
2878 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002879 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002880 if (in.IsRegister()) {
2881 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2882 } else if (in.IsConstant()) {
2883 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2884 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002885 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002886 } else {
2887 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2888 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2889 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002890 break;
2891
Roland Levillaincff13742014-11-17 14:32:17 +00002892 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002893 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002894 if (in.IsFpuRegister()) {
2895 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2896 } else if (in.IsConstant()) {
2897 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2898 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002899 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002900 } else {
2901 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2902 Address(CpuRegister(RSP), in.GetStackIndex()));
2903 }
Roland Levillaincff13742014-11-17 14:32:17 +00002904 break;
2905
2906 default:
2907 LOG(FATAL) << "Unexpected type conversion from " << input_type
2908 << " to " << result_type;
2909 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002910 break;
2911
2912 default:
2913 LOG(FATAL) << "Unexpected type conversion from " << input_type
2914 << " to " << result_type;
2915 }
2916}
2917
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002918void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002919 LocationSummary* locations =
2920 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002921 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002922 case Primitive::kPrimInt: {
2923 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002924 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2925 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002926 break;
2927 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002928
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002929 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002930 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002931 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002932 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002933 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002934 break;
2935 }
2936
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002937 case Primitive::kPrimDouble:
2938 case Primitive::kPrimFloat: {
2939 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002940 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002941 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002942 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002943 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002944
2945 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002946 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002947 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002948}
2949
2950void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2951 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002952 Location first = locations->InAt(0);
2953 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002954 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002955
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002956 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002957 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002958 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002959 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2960 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002961 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2962 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002963 } else {
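          // leal writes first + second into a third register without
          // clobbering either input.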
2964 __ leal(out.AsRegister<CpuRegister>(), Address(
2965 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2966 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002967 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002968 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2969 __ addl(out.AsRegister<CpuRegister>(),
2970 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2971 } else {
2972 __ leal(out.AsRegister<CpuRegister>(), Address(
2973 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2974 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002975 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002976 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002977 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002978 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002979 break;
2980 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002981
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002982 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002983 if (second.IsRegister()) {
2984 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2985 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002986 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2987 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002988 } else {
2989 __ leaq(out.AsRegister<CpuRegister>(), Address(
2990 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2991 }
2992 } else {
2993 DCHECK(second.IsConstant());
2994 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2995 int32_t int32_value = Low32Bits(value);
2996 DCHECK_EQ(int32_value, value);
2997 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2998 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2999 } else {
3000 __ leaq(out.AsRegister<CpuRegister>(), Address(
3001 first.AsRegister<CpuRegister>(), int32_value));
3002 }
3003 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003004 break;
3005 }
3006
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003007 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003008 if (second.IsFpuRegister()) {
3009 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3010 } else if (second.IsConstant()) {
3011 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003012 codegen_->LiteralFloatAddress(
3013 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003014 } else {
3015 DCHECK(second.IsStackSlot());
3016 __ addss(first.AsFpuRegister<XmmRegister>(),
3017 Address(CpuRegister(RSP), second.GetStackIndex()));
3018 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003019 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003020 }
3021
3022 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003023 if (second.IsFpuRegister()) {
3024 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3025 } else if (second.IsConstant()) {
3026 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003027 codegen_->LiteralDoubleAddress(
3028 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003029 } else {
3030 DCHECK(second.IsDoubleStackSlot());
3031 __ addsd(first.AsFpuRegister<XmmRegister>(),
3032 Address(CpuRegister(RSP), second.GetStackIndex()));
3033 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003034 break;
3035 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003036
3037 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003038 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003039 }
3040}
3041
3042void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003043 LocationSummary* locations =
3044 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003045 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003046 case Primitive::kPrimInt: {
3047 locations->SetInAt(0, Location::RequiresRegister());
3048 locations->SetInAt(1, Location::Any());
3049 locations->SetOut(Location::SameAsFirstInput());
3050 break;
3051 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003052 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003053 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003054 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003055 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003056 break;
3057 }
Calin Juravle11351682014-10-23 15:38:15 +01003058 case Primitive::kPrimFloat:
3059 case Primitive::kPrimDouble: {
3060 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003061 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003062 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003063 break;
Calin Juravle11351682014-10-23 15:38:15 +01003064 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003065 default:
Calin Juravle11351682014-10-23 15:38:15 +01003066 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003067 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003068}
3069
3070void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3071 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003072 Location first = locations->InAt(0);
3073 Location second = locations->InAt(1);
3074 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003075 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003076 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003077 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003078 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003079 } else if (second.IsConstant()) {
3080 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003081 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003082 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003083 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003084 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003085 break;
3086 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003087 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003088 if (second.IsConstant()) {
3089 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3090 DCHECK(IsInt<32>(value));
3091 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3092 } else {
3093 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3094 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003095 break;
3096 }
3097
Calin Juravle11351682014-10-23 15:38:15 +01003098 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003099 if (second.IsFpuRegister()) {
3100 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3101 } else if (second.IsConstant()) {
3102 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003103 codegen_->LiteralFloatAddress(
3104 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003105 } else {
3106 DCHECK(second.IsStackSlot());
3107 __ subss(first.AsFpuRegister<XmmRegister>(),
3108 Address(CpuRegister(RSP), second.GetStackIndex()));
3109 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003110 break;
Calin Juravle11351682014-10-23 15:38:15 +01003111 }
3112
3113 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003114 if (second.IsFpuRegister()) {
3115 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3116 } else if (second.IsConstant()) {
3117 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003118 codegen_->LiteralDoubleAddress(
3119 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003120 } else {
3121 DCHECK(second.IsDoubleStackSlot());
3122 __ subsd(first.AsFpuRegister<XmmRegister>(),
3123 Address(CpuRegister(RSP), second.GetStackIndex()));
3124 }
Calin Juravle11351682014-10-23 15:38:15 +01003125 break;
3126 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003127
3128 default:
Calin Juravle11351682014-10-23 15:38:15 +01003129 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003130 }
3131}
3132
Calin Juravle34bacdf2014-10-07 20:23:36 +01003133void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3134 LocationSummary* locations =
3135 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3136 switch (mul->GetResultType()) {
3137 case Primitive::kPrimInt: {
3138 locations->SetInAt(0, Location::RequiresRegister());
3139 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003140 if (mul->InputAt(1)->IsIntConstant()) {
3141 // Can use 3 operand multiply.
3142 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3143 } else {
3144 locations->SetOut(Location::SameAsFirstInput());
3145 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003146 break;
3147 }
3148 case Primitive::kPrimLong: {
3149 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003150 locations->SetInAt(1, Location::Any());
3151 if (mul->InputAt(1)->IsLongConstant() &&
3152 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003153 // Can use 3 operand multiply.
3154 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3155 } else {
3156 locations->SetOut(Location::SameAsFirstInput());
3157 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003158 break;
3159 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003160 case Primitive::kPrimFloat:
3161 case Primitive::kPrimDouble: {
3162 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003163 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003164 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003165 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003166 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003167
3168 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003169 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003170 }
3171}
3172
3173void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3174 LocationSummary* locations = mul->GetLocations();
3175 Location first = locations->InAt(0);
3176 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003177 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003178 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003179 case Primitive::kPrimInt:
3180 // The constant may have ended up in a register, so test explicitly to avoid
3181 // problems where the output may not be the same as the first operand.
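     // (imull also has a three-operand "reg, reg/mem, imm" form, which lets the product be
     // written straight to `out` without clobbering `first`.)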
3182 if (mul->InputAt(1)->IsIntConstant()) {
3183 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3184 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3185 } else if (second.IsRegister()) {
3186 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003187 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003188 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003189 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003190 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003191 __ imull(first.AsRegister<CpuRegister>(),
3192 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003193 }
3194 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003195 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003196 // The constant may have ended up in a register, so test explicitly to avoid
3197 // problems where the output may not be the same as the first operand.
3198 if (mul->InputAt(1)->IsLongConstant()) {
3199 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3200 if (IsInt<32>(value)) {
3201 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3202 Immediate(static_cast<int32_t>(value)));
3203 } else {
3204 // Have to use the constant area.
3205 DCHECK(first.Equals(out));
3206 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3207 }
3208 } else if (second.IsRegister()) {
3209 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003210 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003211 } else {
3212 DCHECK(second.IsDoubleStackSlot());
3213 DCHECK(first.Equals(out));
3214 __ imulq(first.AsRegister<CpuRegister>(),
3215 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003216 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003217 break;
3218 }
3219
Calin Juravleb5bfa962014-10-21 18:02:24 +01003220 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003221 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003222 if (second.IsFpuRegister()) {
3223 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3224 } else if (second.IsConstant()) {
3225 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003226 codegen_->LiteralFloatAddress(
3227 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003228 } else {
3229 DCHECK(second.IsStackSlot());
3230 __ mulss(first.AsFpuRegister<XmmRegister>(),
3231 Address(CpuRegister(RSP), second.GetStackIndex()));
3232 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003233 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003234 }
3235
3236 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003237 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003238 if (second.IsFpuRegister()) {
3239 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3240 } else if (second.IsConstant()) {
3241 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003242 codegen_->LiteralDoubleAddress(
3243 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003244 } else {
3245 DCHECK(second.IsDoubleStackSlot());
3246 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3247 Address(CpuRegister(RSP), second.GetStackIndex()));
3248 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003249 break;
3250 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003251
3252 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003253 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003254 }
3255}
3256
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003257void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3258 uint32_t stack_adjustment, bool is_float) {
3259 if (source.IsStackSlot()) {
3260 DCHECK(is_float);
3261 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3262 } else if (source.IsDoubleStackSlot()) {
3263 DCHECK(!is_float);
3264 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3265 } else {
3266 // Write the value to the temporary location on the stack and load to FP stack.
3267 if (is_float) {
3268 Location stack_temp = Location::StackSlot(temp_offset);
3269 codegen_->Move(stack_temp, source);
3270 __ flds(Address(CpuRegister(RSP), temp_offset));
3271 } else {
3272 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3273 codegen_->Move(stack_temp, source);
3274 __ fldl(Address(CpuRegister(RSP), temp_offset));
3275 }
3276 }
3277}
3278
3279void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3280 Primitive::Type type = rem->GetResultType();
3281 bool is_float = type == Primitive::kPrimFloat;
3282 size_t elem_size = Primitive::ComponentSize(type);
3283 LocationSummary* locations = rem->GetLocations();
3284 Location first = locations->InAt(0);
3285 Location second = locations->InAt(1);
3286 Location out = locations->Out();
3287
3288 // Create stack space for 2 elements.
3289 // TODO: enhance register allocator to ask for stack temporaries.
3290 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3291
3292 // Load the values to the FP stack in reverse order, using temporaries if needed.
3293 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3294 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3295
3296 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003297 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003298 __ Bind(&retry);
3299 __ fprem();
3300
3301 // Move FP status to AX.
3302 __ fstsw();
3303
3304 // And see if the argument reduction is complete. This is signaled by the
3305 // C2 FPU flag bit set to 0.
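     // (C2 is bit 10 of the x87 status word; fprem leaves it set while the reduction is
     // still partial and clears it once the remainder is final, so we simply retry.)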
3306 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3307 __ j(kNotEqual, &retry);
3308
3309 // We have settled on the final value. Retrieve it into an XMM register.
3310 // Store FP top of stack to real stack.
3311 if (is_float) {
3312 __ fsts(Address(CpuRegister(RSP), 0));
3313 } else {
3314 __ fstl(Address(CpuRegister(RSP), 0));
3315 }
3316
3317 // Pop the 2 items from the FP stack.
3318 __ fucompp();
3319
3320 // Load the value from the stack into an XMM register.
3321 DCHECK(out.IsFpuRegister()) << out;
3322 if (is_float) {
3323 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3324 } else {
3325 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3326 }
3327
3328 // And remove the temporary stack space we allocated.
3329 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3330}
3331
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003332void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3333 DCHECK(instruction->IsDiv() || instruction->IsRem());
3334
3335 LocationSummary* locations = instruction->GetLocations();
3336 Location second = locations->InAt(1);
3337 DCHECK(second.IsConstant());
3338
3339 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3340 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003341 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003342
3343 DCHECK(imm == 1 || imm == -1);
3344
3345 switch (instruction->GetResultType()) {
3346 case Primitive::kPrimInt: {
3347 if (instruction->IsRem()) {
3348 __ xorl(output_register, output_register);
3349 } else {
3350 __ movl(output_register, input_register);
3351 if (imm == -1) {
3352 __ negl(output_register);
3353 }
3354 }
3355 break;
3356 }
3357
3358 case Primitive::kPrimLong: {
3359 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003360 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003361 } else {
3362 __ movq(output_register, input_register);
3363 if (imm == -1) {
3364 __ negq(output_register);
3365 }
3366 }
3367 break;
3368 }
3369
3370 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003371 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003372 }
3373}
3374
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003375void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003376 LocationSummary* locations = instruction->GetLocations();
3377 Location second = locations->InAt(1);
3378
3379 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3380 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3381
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003382 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003383 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3384 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003385
3386 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3387
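     // A plain arithmetic shift would round a negative numerator toward negative infinity,
     // so (abs_imm - 1) is added first to make the result round toward zero as Java requires.
     // For example, -7 / 4: (-7 + 3) >> 2 == -1, whereas -7 >> 2 == -2.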
3388 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003389 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003390 __ testl(numerator, numerator);
3391 __ cmov(kGreaterEqual, tmp, numerator);
3392 int shift = CTZ(imm);
3393 __ sarl(tmp, Immediate(shift));
3394
3395 if (imm < 0) {
3396 __ negl(tmp);
3397 }
3398
3399 __ movl(output_register, tmp);
3400 } else {
3401 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3402 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3403
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003404 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003405 __ addq(rdx, numerator);
3406 __ testq(numerator, numerator);
3407 __ cmov(kGreaterEqual, rdx, numerator);
3408 int shift = CTZ(imm);
3409 __ sarq(rdx, Immediate(shift));
3410
3411 if (imm < 0) {
3412 __ negq(rdx);
3413 }
3414
3415 __ movq(output_register, rdx);
3416 }
3417}
3418
3419void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3420 DCHECK(instruction->IsDiv() || instruction->IsRem());
3421
3422 LocationSummary* locations = instruction->GetLocations();
3423 Location second = locations->InAt(1);
3424
3425 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3426 : locations->GetTemp(0).AsRegister<CpuRegister>();
3427 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3428 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3429 : locations->Out().AsRegister<CpuRegister>();
3430 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3431
3432 DCHECK_EQ(RAX, eax.AsRegister());
3433 DCHECK_EQ(RDX, edx.AsRegister());
3434 if (instruction->IsDiv()) {
3435 DCHECK_EQ(RAX, out.AsRegister());
3436 } else {
3437 DCHECK_EQ(RDX, out.AsRegister());
3438 }
3439
3440 int64_t magic;
3441 int shift;
3442
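     // Sketch of the "magic number" division used below (Hacker's Delight / Granlund &
     // Montgomery): take the high half of numerator * magic, correct by +/- numerator when
     // magic and the divisor differ in sign, shift right, then add the sign bit so the
     // quotient rounds toward zero. Illustrative values only: signed 32-bit division by 7
     // is expected to come out of CalculateMagicAndShiftForDivRem as magic 0x92492493 with
     // shift 2.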
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003443 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003444 if (instruction->GetResultType() == Primitive::kPrimInt) {
3445 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3446
3447 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3448
3449 __ movl(numerator, eax);
3450
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003451 __ movl(eax, Immediate(magic));
3452 __ imull(numerator);
3453
3454 if (imm > 0 && magic < 0) {
3455 __ addl(edx, numerator);
3456 } else if (imm < 0 && magic > 0) {
3457 __ subl(edx, numerator);
3458 }
3459
3460 if (shift != 0) {
3461 __ sarl(edx, Immediate(shift));
3462 }
3463
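     // edx += 1 if edx < 0, i.e. round the quotient toward zero.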
3464 __ movl(eax, edx);
3465 __ shrl(edx, Immediate(31));
3466 __ addl(edx, eax);
3467
3468 if (instruction->IsRem()) {
3469 __ movl(eax, numerator);
3470 __ imull(edx, Immediate(imm));
3471 __ subl(eax, edx);
3472 __ movl(edx, eax);
3473 } else {
3474 __ movl(eax, edx);
3475 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003476 } else {
3477 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3478
3479 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3480
3481 CpuRegister rax = eax;
3482 CpuRegister rdx = edx;
3483
3484 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3485
3486 // Save the numerator.
3487 __ movq(numerator, rax);
3488
3489 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003490 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003491
3492 // RDX:RAX = magic * numerator
3493 __ imulq(numerator);
3494
3495 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003496 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003497 __ addq(rdx, numerator);
3498 } else if (imm < 0 && magic > 0) {
3499 // RDX -= numerator
3500 __ subq(rdx, numerator);
3501 }
3502
3503 // Shift if needed.
3504 if (shift != 0) {
3505 __ sarq(rdx, Immediate(shift));
3506 }
3507
3508 // RDX += 1 if RDX < 0
3509 __ movq(rax, rdx);
3510 __ shrq(rdx, Immediate(63));
3511 __ addq(rdx, rax);
3512
3513 if (instruction->IsRem()) {
3514 __ movq(rax, numerator);
3515
3516 if (IsInt<32>(imm)) {
3517 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3518 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003519 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003520 }
3521
3522 __ subq(rax, rdx);
3523 __ movq(rdx, rax);
3524 } else {
3525 __ movq(rax, rdx);
3526 }
3527 }
3528}
3529
Calin Juravlebacfec32014-11-14 15:54:36 +00003530void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3531 DCHECK(instruction->IsDiv() || instruction->IsRem());
3532 Primitive::Type type = instruction->GetResultType();
3533   DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3534
3535 bool is_div = instruction->IsDiv();
3536 LocationSummary* locations = instruction->GetLocations();
3537
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003538 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3539 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003540
Roland Levillain271ab9c2014-11-27 15:23:57 +00003541 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003542 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003543
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003544 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003545 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003546
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003547 if (imm == 0) {
3548       // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3549 } else if (imm == 1 || imm == -1) {
3550 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003551 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003552 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003553 } else {
3554 DCHECK(imm <= -2 || imm >= 2);
3555 GenerateDivRemWithAnyConstant(instruction);
3556 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003557 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003558 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003559 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003560 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003561 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003562
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003563 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3564 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3565     // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000),
3566 // so it's safe to just use negl instead of more complex comparisons.
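     // (The Java language defines the overflowing case instead: MIN_VALUE / -1 == MIN_VALUE
     // and MIN_VALUE % -1 == 0, so the hardware exception must never be raised here.)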
3567 if (type == Primitive::kPrimInt) {
3568 __ cmpl(second_reg, Immediate(-1));
3569 __ j(kEqual, slow_path->GetEntryLabel());
3570       // edx:eax <- sign extension of eax
3571 __ cdq();
3572 // eax = quotient, edx = remainder
3573 __ idivl(second_reg);
3574 } else {
3575 __ cmpq(second_reg, Immediate(-1));
3576 __ j(kEqual, slow_path->GetEntryLabel());
3577       // rdx:rax <- sign extension of rax
3578 __ cqo();
3579 // rax = quotient, rdx = remainder
3580 __ idivq(second_reg);
3581 }
3582 __ Bind(slow_path->GetExitLabel());
3583 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003584}
3585
Calin Juravle7c4954d2014-10-28 16:57:40 +00003586void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3587 LocationSummary* locations =
3588 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3589 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003590 case Primitive::kPrimInt:
3591 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003592 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003593 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003594 locations->SetOut(Location::SameAsFirstInput());
3595       // Intel uses edx:eax (rdx:rax for long) as the dividend.
3596 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003597       // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3598       // that forces its results into RAX and RDX, things are simpler if we also use RDX as an
3599       // output and request another temp.
3600 if (div->InputAt(1)->IsConstant()) {
3601 locations->AddTemp(Location::RequiresRegister());
3602 }
Calin Juravled0d48522014-11-04 16:40:20 +00003603 break;
3604 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003605
Calin Juravle7c4954d2014-10-28 16:57:40 +00003606 case Primitive::kPrimFloat:
3607 case Primitive::kPrimDouble: {
3608 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003609 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003610 locations->SetOut(Location::SameAsFirstInput());
3611 break;
3612 }
3613
3614 default:
3615 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3616 }
3617}
3618
3619void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3620 LocationSummary* locations = div->GetLocations();
3621 Location first = locations->InAt(0);
3622 Location second = locations->InAt(1);
3623 DCHECK(first.Equals(locations->Out()));
3624
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003625 Primitive::Type type = div->GetResultType();
3626 switch (type) {
3627 case Primitive::kPrimInt:
3628 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003629 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003630 break;
3631 }
3632
Calin Juravle7c4954d2014-10-28 16:57:40 +00003633 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003634 if (second.IsFpuRegister()) {
3635 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3636 } else if (second.IsConstant()) {
3637 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003638 codegen_->LiteralFloatAddress(
3639 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003640 } else {
3641 DCHECK(second.IsStackSlot());
3642 __ divss(first.AsFpuRegister<XmmRegister>(),
3643 Address(CpuRegister(RSP), second.GetStackIndex()));
3644 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003645 break;
3646 }
3647
3648 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003649 if (second.IsFpuRegister()) {
3650 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3651 } else if (second.IsConstant()) {
3652 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003653 codegen_->LiteralDoubleAddress(
3654 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003655 } else {
3656 DCHECK(second.IsDoubleStackSlot());
3657 __ divsd(first.AsFpuRegister<XmmRegister>(),
3658 Address(CpuRegister(RSP), second.GetStackIndex()));
3659 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003660 break;
3661 }
3662
3663 default:
3664 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3665 }
3666}
3667
Calin Juravlebacfec32014-11-14 15:54:36 +00003668void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003669 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003670 LocationSummary* locations =
3671 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003672
3673 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003674 case Primitive::kPrimInt:
3675 case Primitive::kPrimLong: {
3676 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003677 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003678       // Intel uses rdx:rax (edx:eax for int) as the dividend and puts the remainder in rdx.
3679 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003680       // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3681       // that forces its results into RAX and RDX, things are simpler if we also use EAX as an
3682       // output and request another temp.
3683 if (rem->InputAt(1)->IsConstant()) {
3684 locations->AddTemp(Location::RequiresRegister());
3685 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003686 break;
3687 }
3688
3689 case Primitive::kPrimFloat:
3690 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003691 locations->SetInAt(0, Location::Any());
3692 locations->SetInAt(1, Location::Any());
3693 locations->SetOut(Location::RequiresFpuRegister());
3694 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003695 break;
3696 }
3697
3698 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003699 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003700 }
3701}
3702
3703void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3704 Primitive::Type type = rem->GetResultType();
3705 switch (type) {
3706 case Primitive::kPrimInt:
3707 case Primitive::kPrimLong: {
3708 GenerateDivRemIntegral(rem);
3709 break;
3710 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003711 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003712 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003713 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003714 break;
3715 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003716 default:
3717 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3718 }
3719}
3720
Calin Juravled0d48522014-11-04 16:40:20 +00003721void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003722 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3723 ? LocationSummary::kCallOnSlowPath
3724 : LocationSummary::kNoCall;
3725 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003726 locations->SetInAt(0, Location::Any());
3727 if (instruction->HasUses()) {
3728 locations->SetOut(Location::SameAsFirstInput());
3729 }
3730}
3731
3732void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003733 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003734 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3735 codegen_->AddSlowPath(slow_path);
3736
3737 LocationSummary* locations = instruction->GetLocations();
3738 Location value = locations->InAt(0);
3739
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003740 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003741 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003742 case Primitive::kPrimByte:
3743 case Primitive::kPrimChar:
3744 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003745 case Primitive::kPrimInt: {
3746 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003747 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003748 __ j(kEqual, slow_path->GetEntryLabel());
3749 } else if (value.IsStackSlot()) {
3750 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3751 __ j(kEqual, slow_path->GetEntryLabel());
3752 } else {
3753 DCHECK(value.IsConstant()) << value;
3754 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3755 __ jmp(slow_path->GetEntryLabel());
3756 }
3757 }
3758 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003759 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003760 case Primitive::kPrimLong: {
3761 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003762 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003763 __ j(kEqual, slow_path->GetEntryLabel());
3764 } else if (value.IsDoubleStackSlot()) {
3765 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3766 __ j(kEqual, slow_path->GetEntryLabel());
3767 } else {
3768 DCHECK(value.IsConstant()) << value;
3769 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3770 __ jmp(slow_path->GetEntryLabel());
3771 }
3772 }
3773 break;
3774 }
3775 default:
3776 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003777 }
Calin Juravled0d48522014-11-04 16:40:20 +00003778}
3779
Calin Juravle9aec02f2014-11-18 23:06:35 +00003780void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3781 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3782
3783 LocationSummary* locations =
3784 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3785
3786 switch (op->GetResultType()) {
3787 case Primitive::kPrimInt:
3788 case Primitive::kPrimLong: {
3789 locations->SetInAt(0, Location::RequiresRegister());
3790 // The shift count needs to be in CL.
3791 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3792 locations->SetOut(Location::SameAsFirstInput());
3793 break;
3794 }
3795 default:
3796 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3797 }
3798}
3799
3800void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3801 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3802
3803 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003804 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003805 Location second = locations->InAt(1);
3806
3807 switch (op->GetResultType()) {
3808 case Primitive::kPrimInt: {
3809 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003810 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003811 if (op->IsShl()) {
3812 __ shll(first_reg, second_reg);
3813 } else if (op->IsShr()) {
3814 __ sarl(first_reg, second_reg);
3815 } else {
3816 __ shrl(first_reg, second_reg);
3817 }
3818 } else {
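     // Constant shift distances are masked to the low five bits (six for the long case
     // below), matching both Java shift semantics and the hardware's treatment of register
     // shift counts.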
Roland Levillain5b5b9312016-03-22 14:57:31 +00003819 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003820 if (op->IsShl()) {
3821 __ shll(first_reg, imm);
3822 } else if (op->IsShr()) {
3823 __ sarl(first_reg, imm);
3824 } else {
3825 __ shrl(first_reg, imm);
3826 }
3827 }
3828 break;
3829 }
3830 case Primitive::kPrimLong: {
3831 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003832 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003833 if (op->IsShl()) {
3834 __ shlq(first_reg, second_reg);
3835 } else if (op->IsShr()) {
3836 __ sarq(first_reg, second_reg);
3837 } else {
3838 __ shrq(first_reg, second_reg);
3839 }
3840 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003841 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003842 if (op->IsShl()) {
3843 __ shlq(first_reg, imm);
3844 } else if (op->IsShr()) {
3845 __ sarq(first_reg, imm);
3846 } else {
3847 __ shrq(first_reg, imm);
3848 }
3849 }
3850 break;
3851 }
3852 default:
3853 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003854 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003855 }
3856}
3857
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003858void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3859 LocationSummary* locations =
3860 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3861
3862 switch (ror->GetResultType()) {
3863 case Primitive::kPrimInt:
3864 case Primitive::kPrimLong: {
3865 locations->SetInAt(0, Location::RequiresRegister());
3866 // The shift count needs to be in CL (unless it is a constant).
3867 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3868 locations->SetOut(Location::SameAsFirstInput());
3869 break;
3870 }
3871 default:
3872 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3873 UNREACHABLE();
3874 }
3875}
3876
3877void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3878 LocationSummary* locations = ror->GetLocations();
3879 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3880 Location second = locations->InAt(1);
3881
3882 switch (ror->GetResultType()) {
3883 case Primitive::kPrimInt:
3884 if (second.IsRegister()) {
3885 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3886 __ rorl(first_reg, second_reg);
3887 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003888 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003889 __ rorl(first_reg, imm);
3890 }
3891 break;
3892 case Primitive::kPrimLong:
3893 if (second.IsRegister()) {
3894 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3895 __ rorq(first_reg, second_reg);
3896 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003897 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003898 __ rorq(first_reg, imm);
3899 }
3900 break;
3901 default:
3902 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3903 UNREACHABLE();
3904 }
3905}
3906
Calin Juravle9aec02f2014-11-18 23:06:35 +00003907void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3908 HandleShift(shl);
3909}
3910
3911void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3912 HandleShift(shl);
3913}
3914
3915void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3916 HandleShift(shr);
3917}
3918
3919void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3920 HandleShift(shr);
3921}
3922
3923void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3924 HandleShift(ushr);
3925}
3926
3927void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3928 HandleShift(ushr);
3929}
3930
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003931void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003932 LocationSummary* locations =
3933 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003934 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003935 if (instruction->IsStringAlloc()) {
3936 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3937 } else {
3938 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3939 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3940 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003941 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003942}
3943
3944void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003945   // Note: if heap poisoning is enabled, the entry point takes care
3946 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003947 if (instruction->IsStringAlloc()) {
3948 // String is allocated through StringFactory. Call NewEmptyString entry point.
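     // (The entry point is read off the current Thread, which ART reaches through the gs
     // segment on x86-64; that is what the gs()-relative absolute address below relies on.)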
3949 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3950 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3951 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3952 __ call(Address(temp, code_offset.SizeValue()));
3953 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3954 } else {
3955 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3956 instruction,
3957 instruction->GetDexPc(),
3958 nullptr);
3959 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3960 DCHECK(!codegen_->IsLeafMethod());
3961 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003962}
3963
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003964void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3965 LocationSummary* locations =
3966 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3967 InvokeRuntimeCallingConvention calling_convention;
3968 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003969 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003970 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003971 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003972}
3973
3974void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3975 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003976 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3977 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003978   // Note: if heap poisoning is enabled, the entry point takes care
3979 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003980 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3981 instruction,
3982 instruction->GetDexPc(),
3983 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003984 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003985
3986 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003987}
3988
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003989void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003990 LocationSummary* locations =
3991 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003992 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3993 if (location.IsStackSlot()) {
3994 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3995 } else if (location.IsDoubleStackSlot()) {
3996 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3997 }
3998 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003999}
4000
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004001void InstructionCodeGeneratorX86_64::VisitParameterValue(
4002 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004003 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004004}
4005
4006void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4007 LocationSummary* locations =
4008 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4009 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4010}
4011
4012void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
4013 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4014 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004015}
4016
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004017void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4018 LocationSummary* locations =
4019 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4020 locations->SetInAt(0, Location::RequiresRegister());
4021 locations->SetOut(Location::RequiresRegister());
4022}
4023
4024void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4025 LocationSummary* locations = instruction->GetLocations();
4026 uint32_t method_offset = 0;
Vladimir Markoa1de9182016-02-25 11:37:38 +00004027 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004028 method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4029 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
4030 } else {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004031 __ movq(locations->Out().AsRegister<CpuRegister>(),
4032 Address(locations->InAt(0).AsRegister<CpuRegister>(),
4033 mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
4034 method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
4035 instruction->GetIndex() % ImTable::kSize, kX86_64PointerSize));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004036 }
4037 __ movq(locations->Out().AsRegister<CpuRegister>(),
4038 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
4039}
4040
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004041void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004042 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004043 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004044 locations->SetInAt(0, Location::RequiresRegister());
4045 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004046}
4047
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004048void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4049 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004050 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4051 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004052 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004053 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004054 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004055 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004056 break;
4057
4058 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004059 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004060 break;
4061
4062 default:
4063 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4064 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004065}
4066
David Brazdil66d126e2015-04-03 16:02:44 +01004067void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4068 LocationSummary* locations =
4069 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4070 locations->SetInAt(0, Location::RequiresRegister());
4071 locations->SetOut(Location::SameAsFirstInput());
4072}
4073
4074void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004075 LocationSummary* locations = bool_not->GetLocations();
4076 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4077 locations->Out().AsRegister<CpuRegister>().AsRegister());
4078 Location out = locations->Out();
4079 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4080}
4081
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004082void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004083 LocationSummary* locations =
4084 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004085 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004086 locations->SetInAt(i, Location::Any());
4087 }
4088 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004089}
4090
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004091void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004092 LOG(FATAL) << "Unimplemented";
4093}
4094
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004095void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004096 /*
4097    * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004098 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004099 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4100 */
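     // (x86-64's TSO-like ordering already preserves load-load, load-store and store-store
     // order; only the store-load case, kAnyAny, needs a real fence, plus kNTStoreStore
     // because non-temporal stores bypass the normal ordering.)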
4101 switch (kind) {
4102 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004103 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004104 break;
4105 }
4106 case MemBarrierKind::kAnyStore:
4107 case MemBarrierKind::kLoadAny:
4108 case MemBarrierKind::kStoreStore: {
4109 // nop
4110 break;
4111 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004112 case MemBarrierKind::kNTStoreStore:
4113 // Non-Temporal Store/Store needs an explicit fence.
4114 MemoryFence(/* non-temporal */ true);
4115 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004116 }
4117}
4118
4119void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4120 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4121
Roland Levillain0d5a2812015-11-13 10:07:31 +00004122 bool object_field_get_with_read_barrier =
4123 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004124 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004125 new (GetGraph()->GetArena()) LocationSummary(instruction,
4126 object_field_get_with_read_barrier ?
4127 LocationSummary::kCallOnSlowPath :
4128 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004129 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004130 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4131 locations->SetOut(Location::RequiresFpuRegister());
4132 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004133 // The output overlaps for an object field get when read barriers
4134 // are enabled: we do not want the move to overwrite the object's
4135 // location, as we need it to emit the read barrier.
4136 locations->SetOut(
4137 Location::RequiresRegister(),
4138 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004139 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004140 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4141 // We need a temporary register for the read barrier marking slow
4142 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4143 locations->AddTemp(Location::RequiresRegister());
4144 }
Calin Juravle52c48962014-12-16 17:02:57 +00004145}
4146
4147void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4148 const FieldInfo& field_info) {
4149 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4150
4151 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004152 Location base_loc = locations->InAt(0);
4153 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004154 Location out = locations->Out();
4155 bool is_volatile = field_info.IsVolatile();
4156 Primitive::Type field_type = field_info.GetFieldType();
4157 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4158
4159 switch (field_type) {
4160 case Primitive::kPrimBoolean: {
4161 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4162 break;
4163 }
4164
4165 case Primitive::kPrimByte: {
4166 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4167 break;
4168 }
4169
4170 case Primitive::kPrimShort: {
4171 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4172 break;
4173 }
4174
4175 case Primitive::kPrimChar: {
4176 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4177 break;
4178 }
4179
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004180 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004181 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4182 break;
4183 }
4184
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004185 case Primitive::kPrimNot: {
4186 // /* HeapReference<Object> */ out = *(base + offset)
4187 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4188 Location temp_loc = locations->GetTemp(0);
4189 // Note that a potential implicit null check is handled in this
4190         // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4191 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4192 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4193 if (is_volatile) {
4194 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4195 }
4196 } else {
4197 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4198 codegen_->MaybeRecordImplicitNullCheck(instruction);
4199 if (is_volatile) {
4200 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4201 }
4202 // If read barriers are enabled, emit read barriers other than
4203 // Baker's using a slow path (and also unpoison the loaded
4204 // reference, if heap poisoning is enabled).
4205 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4206 }
4207 break;
4208 }
4209
Calin Juravle52c48962014-12-16 17:02:57 +00004210 case Primitive::kPrimLong: {
4211 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4212 break;
4213 }
4214
4215 case Primitive::kPrimFloat: {
4216 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4217 break;
4218 }
4219
4220 case Primitive::kPrimDouble: {
4221 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4222 break;
4223 }
4224
4225 case Primitive::kPrimVoid:
4226 LOG(FATAL) << "Unreachable type " << field_type;
4227 UNREACHABLE();
4228 }
4229
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004230 if (field_type == Primitive::kPrimNot) {
4231 // Potential implicit null checks, in the case of reference
4232 // fields, are handled in the previous switch statement.
4233 } else {
4234 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004235 }
Roland Levillain4d027112015-07-01 15:41:14 +01004236
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004237 if (is_volatile) {
4238 if (field_type == Primitive::kPrimNot) {
4239 // Memory barriers, in the case of references, are also handled
4240 // in the previous switch statement.
4241 } else {
4242 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4243 }
Roland Levillain4d027112015-07-01 15:41:14 +01004244 }
Calin Juravle52c48962014-12-16 17:02:57 +00004245}
4246
4247void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4248 const FieldInfo& field_info) {
4249 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4250
4251 LocationSummary* locations =
4252 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004253 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004254 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004255 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004256 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004257
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004258 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004259 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004260 if (is_volatile) {
4261 // In order to satisfy the semantics of volatile, this must be a single instruction store.
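      // (x86-64 can store at most a sign-extended 32-bit immediate in one mov, so a wider
      // constant would need two separate stores that another thread could observe
      // half-written.)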
4262 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4263 } else {
4264 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4265 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004266 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004267 if (is_volatile) {
4268 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4269 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4270 } else {
4271 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4272 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004273 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004274 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004275 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004276 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004277 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004278 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4279 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004280 locations->AddTemp(Location::RequiresRegister());
4281 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004282}
4283
Calin Juravle52c48962014-12-16 17:02:57 +00004284void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004285 const FieldInfo& field_info,
4286 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004287 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4288
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004289 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004290 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4291 Location value = locations->InAt(1);
4292 bool is_volatile = field_info.IsVolatile();
4293 Primitive::Type field_type = field_info.GetFieldType();
4294 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4295
4296 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004297 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004298 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004299
Mark Mendellea5af682015-10-22 17:35:49 -04004300 bool maybe_record_implicit_null_check_done = false;
4301
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004302 switch (field_type) {
4303 case Primitive::kPrimBoolean:
4304 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004305 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004306 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004307 __ movb(Address(base, offset), Immediate(v));
4308 } else {
4309 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4310 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004311 break;
4312 }
4313
4314 case Primitive::kPrimShort:
4315 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004316 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004317 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004318 __ movw(Address(base, offset), Immediate(v));
4319 } else {
4320 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4321 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004322 break;
4323 }
4324
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004325 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004326 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004327 if (value.IsConstant()) {
4328 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004329 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4330 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4331 // Note: if heap poisoning is enabled, no need to poison
4332 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004333 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004334 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004335 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4336 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4337 __ movl(temp, value.AsRegister<CpuRegister>());
4338 __ PoisonHeapReference(temp);
4339 __ movl(Address(base, offset), temp);
4340 } else {
4341 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4342 }
Mark Mendell40741f32015-04-20 22:10:34 -04004343 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004344 break;
4345 }
4346
4347 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004348 if (value.IsConstant()) {
4349 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004350 codegen_->MoveInt64ToAddress(Address(base, offset),
4351 Address(base, offset + sizeof(int32_t)),
4352 v,
4353 instruction);
4354 maybe_record_implicit_null_check_done = true;
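        // MoveInt64ToAddress is understood to emit a single movq when `v` fits a
        // sign-extended 32-bit immediate, and otherwise two 32-bit stores to the low
        // and high halves (e.g. for v = 0x100000000: movl [base+offset], 0 then
        // movl [base+offset+4], 1), recording the implicit null check right after the
        // first store itself; the flag set above suppresses a second recording below.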
Mark Mendell40741f32015-04-20 22:10:34 -04004355 } else {
4356 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4357 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004358 break;
4359 }
4360
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004361 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004362 if (value.IsConstant()) {
4363 int32_t v =
4364 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4365 __ movl(Address(base, offset), Immediate(v));
4366 } else {
4367 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4368 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004369 break;
4370 }
4371
4372 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004373 if (value.IsConstant()) {
4374 int64_t v =
4375 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4376 codegen_->MoveInt64ToAddress(Address(base, offset),
4377 Address(base, offset + sizeof(int32_t)),
4378 v,
4379 instruction);
4380 maybe_record_implicit_null_check_done = true;
4381 } else {
4382 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4383 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004384 break;
4385 }
4386
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004387 case Primitive::kPrimVoid:
4388 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004389 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004390 }
Calin Juravle52c48962014-12-16 17:02:57 +00004391
Mark Mendellea5af682015-10-22 17:35:49 -04004392 if (!maybe_record_implicit_null_check_done) {
4393 codegen_->MaybeRecordImplicitNullCheck(instruction);
4394 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004395
4396 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4397 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4398 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004399 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004400 }
4401
Calin Juravle52c48962014-12-16 17:02:57 +00004402 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004403 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004404 }
4405}
4406
4407void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4408 HandleFieldSet(instruction, instruction->GetFieldInfo());
4409}
4410
4411void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004412 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004413}
4414
4415void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004416 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004417}
4418
4419void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004420 HandleFieldGet(instruction, instruction->GetFieldInfo());
4421}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004422
Calin Juravle52c48962014-12-16 17:02:57 +00004423void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4424 HandleFieldGet(instruction);
4425}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004426
Calin Juravle52c48962014-12-16 17:02:57 +00004427void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4428 HandleFieldGet(instruction, instruction->GetFieldInfo());
4429}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004430
Calin Juravle52c48962014-12-16 17:02:57 +00004431void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4432 HandleFieldSet(instruction, instruction->GetFieldInfo());
4433}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004434
Calin Juravle52c48962014-12-16 17:02:57 +00004435void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004436 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004437}
4438
Calin Juravlee460d1d2015-09-29 04:52:17 +01004439void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4440 HUnresolvedInstanceFieldGet* instruction) {
4441 FieldAccessCallingConventionX86_64 calling_convention;
4442 codegen_->CreateUnresolvedFieldLocationSummary(
4443 instruction, instruction->GetFieldType(), calling_convention);
4444}
4445
4446void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4447 HUnresolvedInstanceFieldGet* instruction) {
4448 FieldAccessCallingConventionX86_64 calling_convention;
4449 codegen_->GenerateUnresolvedFieldAccess(instruction,
4450 instruction->GetFieldType(),
4451 instruction->GetFieldIndex(),
4452 instruction->GetDexPc(),
4453 calling_convention);
4454}
4455
4456void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4457 HUnresolvedInstanceFieldSet* instruction) {
4458 FieldAccessCallingConventionX86_64 calling_convention;
4459 codegen_->CreateUnresolvedFieldLocationSummary(
4460 instruction, instruction->GetFieldType(), calling_convention);
4461}
4462
4463void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4464 HUnresolvedInstanceFieldSet* instruction) {
4465 FieldAccessCallingConventionX86_64 calling_convention;
4466 codegen_->GenerateUnresolvedFieldAccess(instruction,
4467 instruction->GetFieldType(),
4468 instruction->GetFieldIndex(),
4469 instruction->GetDexPc(),
4470 calling_convention);
4471}
4472
4473void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4474 HUnresolvedStaticFieldGet* instruction) {
4475 FieldAccessCallingConventionX86_64 calling_convention;
4476 codegen_->CreateUnresolvedFieldLocationSummary(
4477 instruction, instruction->GetFieldType(), calling_convention);
4478}
4479
4480void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4481 HUnresolvedStaticFieldGet* instruction) {
4482 FieldAccessCallingConventionX86_64 calling_convention;
4483 codegen_->GenerateUnresolvedFieldAccess(instruction,
4484 instruction->GetFieldType(),
4485 instruction->GetFieldIndex(),
4486 instruction->GetDexPc(),
4487 calling_convention);
4488}
4489
4490void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4491 HUnresolvedStaticFieldSet* instruction) {
4492 FieldAccessCallingConventionX86_64 calling_convention;
4493 codegen_->CreateUnresolvedFieldLocationSummary(
4494 instruction, instruction->GetFieldType(), calling_convention);
4495}
4496
4497void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4498 HUnresolvedStaticFieldSet* instruction) {
4499 FieldAccessCallingConventionX86_64 calling_convention;
4500 codegen_->GenerateUnresolvedFieldAccess(instruction,
4501 instruction->GetFieldType(),
4502 instruction->GetFieldIndex(),
4503 instruction->GetDexPc(),
4504 calling_convention);
4505}
4506
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004507void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004508 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4509 ? LocationSummary::kCallOnSlowPath
4510 : LocationSummary::kNoCall;
4511 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4512 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004513 ? Location::RequiresRegister()
4514 : Location::Any();
4515 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004516 if (instruction->HasUses()) {
4517 locations->SetOut(Location::SameAsFirstInput());
4518 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004519}
4520
Calin Juravle2ae48182016-03-16 14:05:09 +00004521void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4522 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004523 return;
4524 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004525 LocationSummary* locations = instruction->GetLocations();
4526 Location obj = locations->InAt(0);
4527
4528 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
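  // The testl exists only to perform a load from [obj + 0]: it faults when obj is
  // null, and the fault handler is relied upon to map the faulting PC (recorded just
  // below) back to this null check and throw NullPointerException. RAX is an
  // arbitrary register operand; neither it nor the flags result is used.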
Calin Juravle2ae48182016-03-16 14:05:09 +00004529 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004530}
4531
Calin Juravle2ae48182016-03-16 14:05:09 +00004532void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004533 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004534 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004535
4536 LocationSummary* locations = instruction->GetLocations();
4537 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004538
4539 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004540 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004541 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004542 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004543 } else {
4544 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004545 DCHECK(obj.GetConstant()->IsNullConstant());
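    // A constant object here can only be the null literal, so the null check is known
    // statically to fail and we branch to the slow path unconditionally.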
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004546 __ jmp(slow_path->GetEntryLabel());
4547 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004548 }
4549 __ j(kEqual, slow_path->GetEntryLabel());
4550}
4551
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004552void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004553 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004554}
4555
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004556void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004557 bool object_array_get_with_read_barrier =
4558 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004559 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004560 new (GetGraph()->GetArena()) LocationSummary(instruction,
4561 object_array_get_with_read_barrier ?
4562 LocationSummary::kCallOnSlowPath :
4563 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004564 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004565 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004566 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4567 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4568 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004569 // The output overlaps for an object array get when read barriers
4570 // are enabled: we do not want the move to overwrite the array's
4571 // location, as we need it to emit the read barrier.
4572 locations->SetOut(
4573 Location::RequiresRegister(),
4574 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004575 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004576 // We need a temporary register for the read barrier marking slow
4577 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4578 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4579 locations->AddTemp(Location::RequiresRegister());
4580 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004581}
4582
4583void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4584 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004585 Location obj_loc = locations->InAt(0);
4586 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004587 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004588 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004589 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004590
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004591 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004592 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004593 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004594 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004595 if (index.IsConstant()) {
4596 __ movzxb(out, Address(obj,
4597 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4598 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004599 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004600 }
4601 break;
4602 }
4603
4604 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004605 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004606 if (index.IsConstant()) {
4607 __ movsxb(out, Address(obj,
4608 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4609 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004610 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004611 }
4612 break;
4613 }
4614
4615 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004616 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004617 if (index.IsConstant()) {
4618 __ movsxw(out, Address(obj,
4619 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4620 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004621 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004622 }
4623 break;
4624 }
4625
4626 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004627 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004628 if (index.IsConstant()) {
4629 __ movzxw(out, Address(obj,
4630 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4631 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004632 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004633 }
4634 break;
4635 }
4636
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004637 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004638 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004639 if (index.IsConstant()) {
4640 __ movl(out, Address(obj,
4641 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4642 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004643 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004644 }
4645 break;
4646 }
4647
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004648 case Primitive::kPrimNot: {
4649 static_assert(
4650 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4651 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004652 // /* HeapReference<Object> */ out =
4653 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4654 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4655 Location temp = locations->GetTemp(0);
4656 // Note that a potential implicit null check is handled in this
 4657 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4658 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4659 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4660 } else {
4661 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4662 if (index.IsConstant()) {
4663 uint32_t offset =
4664 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4665 __ movl(out, Address(obj, offset));
4666 codegen_->MaybeRecordImplicitNullCheck(instruction);
4667 // If read barriers are enabled, emit read barriers other than
4668 // Baker's using a slow path (and also unpoison the loaded
4669 // reference, if heap poisoning is enabled).
4670 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4671 } else {
4672 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4673 codegen_->MaybeRecordImplicitNullCheck(instruction);
4674 // If read barriers are enabled, emit read barriers other than
4675 // Baker's using a slow path (and also unpoison the loaded
4676 // reference, if heap poisoning is enabled).
4677 codegen_->MaybeGenerateReadBarrierSlow(
4678 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4679 }
4680 }
4681 break;
4682 }
4683
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004684 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004685 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004686 if (index.IsConstant()) {
4687 __ movq(out, Address(obj,
4688 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4689 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004690 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004691 }
4692 break;
4693 }
4694
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004695 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004696 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004697 if (index.IsConstant()) {
4698 __ movss(out, Address(obj,
4699 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4700 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004701 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004702 }
4703 break;
4704 }
4705
4706 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004707 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004708 if (index.IsConstant()) {
4709 __ movsd(out, Address(obj,
4710 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4711 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004712 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004713 }
4714 break;
4715 }
4716
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004717 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004718 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004719 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004720 }
Roland Levillain4d027112015-07-01 15:41:14 +01004721
4722 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004723 // Potential implicit null checks, in the case of reference
4724 // arrays, are handled in the previous switch statement.
4725 } else {
4726 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004727 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004728}
4729
4730void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004731 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004732
4733 bool needs_write_barrier =
4734 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004735 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004736 bool object_array_set_with_read_barrier =
4737 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004738
Nicolas Geoffray39468442014-09-02 15:17:15 +01004739 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004740 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004741 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004742 LocationSummary::kCallOnSlowPath :
4743 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004744
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004745 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004746 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4747 if (Primitive::IsFloatingPointType(value_type)) {
4748 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004749 } else {
4750 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4751 }
4752
4753 if (needs_write_barrier) {
4754 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004755
4756 // This first temporary register is possibly used for heap
4757 // reference poisoning and/or read barrier emission too.
4758 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004759 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004760 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004761}
4762
4763void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4764 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004765 Location array_loc = locations->InAt(0);
4766 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004767 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004768 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004769 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004770 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004771 bool needs_write_barrier =
4772 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004773 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4774 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4775 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004776
4777 switch (value_type) {
4778 case Primitive::kPrimBoolean:
4779 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004780 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4781 Address address = index.IsConstant()
4782 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4783 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4784 if (value.IsRegister()) {
4785 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004786 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004787 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004788 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004789 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004790 break;
4791 }
4792
4793 case Primitive::kPrimShort:
4794 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004795 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4796 Address address = index.IsConstant()
4797 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4798 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4799 if (value.IsRegister()) {
4800 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004801 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004802 DCHECK(value.IsConstant()) << value;
4803 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004804 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004805 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004806 break;
4807 }
4808
4809 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004810 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4811 Address address = index.IsConstant()
4812 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4813 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004814
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004815 if (!value.IsRegister()) {
4816 // Just setting null.
4817 DCHECK(instruction->InputAt(2)->IsNullConstant());
4818 DCHECK(value.IsConstant()) << value;
4819 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004820 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004821 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004822 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004823 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004824 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004825
4826 DCHECK(needs_write_barrier);
4827 CpuRegister register_value = value.AsRegister<CpuRegister>();
4828 NearLabel done, not_null, do_put;
4829 SlowPathCode* slow_path = nullptr;
4830 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004831 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004832 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4833 codegen_->AddSlowPath(slow_path);
4834 if (instruction->GetValueCanBeNull()) {
4835 __ testl(register_value, register_value);
4836 __ j(kNotEqual, &not_null);
4837 __ movl(address, Immediate(0));
4838 codegen_->MaybeRecordImplicitNullCheck(instruction);
4839 __ jmp(&done);
4840 __ Bind(&not_null);
4841 }
4842
Roland Levillain0d5a2812015-11-13 10:07:31 +00004843 if (kEmitCompilerReadBarrier) {
4844 // When read barriers are enabled, the type checking
4845 // instrumentation requires two read barriers:
4846 //
4847 // __ movl(temp2, temp);
4848 // // /* HeapReference<Class> */ temp = temp->component_type_
4849 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004850 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004851 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4852 //
4853 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4854 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004855 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004856 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4857 //
4858 // __ cmpl(temp, temp2);
4859 //
4860 // However, the second read barrier may trash `temp`, as it
4861 // is a temporary register, and as such would not be saved
4862 // along with live registers before calling the runtime (nor
4863 // restored afterwards). So in this case, we bail out and
4864 // delegate the work to the array set slow path.
4865 //
4866 // TODO: Extend the register allocator to support a new
4867 // "(locally) live temp" location so as to avoid always
4868 // going into the slow path when read barriers are enabled.
4869 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004870 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004871 // /* HeapReference<Class> */ temp = array->klass_
4872 __ movl(temp, Address(array, class_offset));
4873 codegen_->MaybeRecordImplicitNullCheck(instruction);
4874 __ MaybeUnpoisonHeapReference(temp);
4875
4876 // /* HeapReference<Class> */ temp = temp->component_type_
4877 __ movl(temp, Address(temp, component_offset));
4878 // If heap poisoning is enabled, no need to unpoison `temp`
4879 // nor the object reference in `register_value->klass`, as
4880 // we are comparing two poisoned references.
4881 __ cmpl(temp, Address(register_value, class_offset));
4882
4883 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4884 __ j(kEqual, &do_put);
4885 // If heap poisoning is enabled, the `temp` reference has
4886 // not been unpoisoned yet; unpoison it now.
4887 __ MaybeUnpoisonHeapReference(temp);
4888
4889 // /* HeapReference<Class> */ temp = temp->super_class_
4890 __ movl(temp, Address(temp, super_offset));
4891 // If heap poisoning is enabled, no need to unpoison
4892 // `temp`, as we are comparing against null below.
4893 __ testl(temp, temp);
4894 __ j(kNotEqual, slow_path->GetEntryLabel());
4895 __ Bind(&do_put);
4896 } else {
4897 __ j(kNotEqual, slow_path->GetEntryLabel());
4898 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004899 }
4900 }
4901
4902 if (kPoisonHeapReferences) {
4903 __ movl(temp, register_value);
4904 __ PoisonHeapReference(temp);
4905 __ movl(address, temp);
4906 } else {
4907 __ movl(address, register_value);
4908 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004909 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004910 codegen_->MaybeRecordImplicitNullCheck(instruction);
4911 }
4912
4913 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4914 codegen_->MarkGCCard(
4915 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4916 __ Bind(&done);
4917
4918 if (slow_path != nullptr) {
4919 __ Bind(slow_path->GetExitLabel());
4920 }
4921
4922 break;
4923 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004924
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004925 case Primitive::kPrimInt: {
4926 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4927 Address address = index.IsConstant()
4928 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4929 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4930 if (value.IsRegister()) {
4931 __ movl(address, value.AsRegister<CpuRegister>());
4932 } else {
4933 DCHECK(value.IsConstant()) << value;
4934 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4935 __ movl(address, Immediate(v));
4936 }
4937 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004938 break;
4939 }
4940
4941 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004942 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4943 Address address = index.IsConstant()
4944 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4945 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4946 if (value.IsRegister()) {
4947 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004948 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004949 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004950 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004951 Address address_high = index.IsConstant()
4952 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4953 offset + sizeof(int32_t))
4954 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4955 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004956 }
4957 break;
4958 }
4959
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004960 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004961 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4962 Address address = index.IsConstant()
4963 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4964 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004965 if (value.IsFpuRegister()) {
4966 __ movss(address, value.AsFpuRegister<XmmRegister>());
4967 } else {
4968 DCHECK(value.IsConstant());
4969 int32_t v =
4970 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4971 __ movl(address, Immediate(v));
4972 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004973 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004974 break;
4975 }
4976
4977 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004978 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4979 Address address = index.IsConstant()
4980 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4981 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004982 if (value.IsFpuRegister()) {
4983 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4984 codegen_->MaybeRecordImplicitNullCheck(instruction);
4985 } else {
4986 int64_t v =
4987 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4988 Address address_high = index.IsConstant()
4989 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4990 offset + sizeof(int32_t))
4991 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4992 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4993 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004994 break;
4995 }
4996
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004997 case Primitive::kPrimVoid:
4998 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004999 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005000 }
5001}
5002
5003void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005004 LocationSummary* locations =
5005 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005006 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005007 if (!instruction->IsEmittedAtUseSite()) {
5008 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5009 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005010}
5011
5012void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005013 if (instruction->IsEmittedAtUseSite()) {
5014 return;
5015 }
5016
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005017 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005018 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005019 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5020 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005021 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005022 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005023}
5024
5025void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00005026 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
5027 ? LocationSummary::kCallOnSlowPath
5028 : LocationSummary::kNoCall;
5029 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005030 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005031 HInstruction* length = instruction->InputAt(1);
5032 if (!length->IsEmittedAtUseSite()) {
5033 locations->SetInAt(1, Location::RegisterOrConstant(length));
5034 }
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005035 if (instruction->HasUses()) {
5036 locations->SetOut(Location::SameAsFirstInput());
5037 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005038}
5039
5040void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5041 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005042 Location index_loc = locations->InAt(0);
5043 Location length_loc = locations->InAt(1);
Mark Mendellee8d9712016-07-12 11:13:15 -04005044 SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005045
Mark Mendell99dbd682015-04-22 16:18:52 -04005046 if (length_loc.IsConstant()) {
5047 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5048 if (index_loc.IsConstant()) {
 5049 // BCE will remove the bounds check if we are guaranteed to pass.
5050 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5051 if (index < 0 || index >= length) {
5052 codegen_->AddSlowPath(slow_path);
5053 __ jmp(slow_path->GetEntryLabel());
5054 } else {
 5055 // Some optimization after BCE may have generated this; the index is statically
 5056 // within range, so there is no need to emit a bounds check.
5057 }
5058 return;
5059 }
5060
 5061 // The jump condition is reversed here because the constant length must be the second (immediate) operand of cmpl.
5062 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5063 __ cmpl(index_reg, Immediate(length));
5064 codegen_->AddSlowPath(slow_path);
5065 __ j(kAboveEqual, slow_path->GetEntryLabel());
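    // The unsigned condition (kAboveEqual here, kBelowEqual below) also catches
    // negative indices for free: reinterpreted as unsigned they become very large
    // values and take the slow path as well.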
Mark Mendellf60c90b2015-03-04 15:12:59 -05005066 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005067 HInstruction* array_length = instruction->InputAt(1);
5068 if (array_length->IsEmittedAtUseSite()) {
5069 // Address the length field in the array.
5070 DCHECK(array_length->IsArrayLength());
5071 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
5072 Location array_loc = array_length->GetLocations()->InAt(0);
5073 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
5074 if (index_loc.IsConstant()) {
5075 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5076 __ cmpl(array_len, Immediate(value));
5077 } else {
5078 __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
5079 }
5080 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendell99dbd682015-04-22 16:18:52 -04005081 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005082 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5083 if (index_loc.IsConstant()) {
5084 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5085 __ cmpl(length, Immediate(value));
5086 } else {
5087 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5088 }
Mark Mendell99dbd682015-04-22 16:18:52 -04005089 }
5090 codegen_->AddSlowPath(slow_path);
5091 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005092 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005093}
5094
5095void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5096 CpuRegister card,
5097 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005098 CpuRegister value,
5099 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005100 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005101 if (value_can_be_null) {
5102 __ testl(value, value);
5103 __ j(kEqual, &is_null);
5104 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005105 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5106 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005107 __ movq(temp, object);
5108 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005109 __ movb(Address(temp, card, TIMES_1, 0), card);
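  // `card` holds Thread::card_table_, the biased base of the card table, and `temp`
  // now holds object >> kCardShift, so [temp + card] addresses the card covering
  // `object`. The byte stored is the low 8 bits of `card` itself, which the card
  // table is understood to bias so that this value equals the dirty-card marker,
  // making the mark a single movb with no extra constant load.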
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005110 if (value_can_be_null) {
5111 __ Bind(&is_null);
5112 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005113}
5114
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005115void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005116 LOG(FATAL) << "Unimplemented";
5117}
5118
5119void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005120 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5121}
5122
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005123void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5124 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5125}
5126
5127void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005128 HBasicBlock* block = instruction->GetBlock();
5129 if (block->GetLoopInformation() != nullptr) {
5130 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5131 // The back edge will generate the suspend check.
5132 return;
5133 }
5134 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5135 // The goto will generate the suspend check.
5136 return;
5137 }
5138 GenerateSuspendCheck(instruction, nullptr);
5139}
5140
5141void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5142 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005143 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005144 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5145 if (slow_path == nullptr) {
5146 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5147 instruction->SetSlowPath(slow_path);
5148 codegen_->AddSlowPath(slow_path);
5149 if (successor != nullptr) {
5150 DCHECK(successor->IsLoopHeader());
5151 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5152 }
5153 } else {
5154 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5155 }
5156
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005157 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5158 /* no_rip */ true),
5159 Immediate(0));
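  // The 16-bit thread-flags word is zero in the common case; any non-zero value
  // (e.g. a pending suspend or checkpoint request) routes the thread into the slow
  // path, which calls into the runtime and then resumes either at the return label
  // bound below or at the loop-header successor.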
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005160 if (successor == nullptr) {
5161 __ j(kNotEqual, slow_path->GetEntryLabel());
5162 __ Bind(slow_path->GetReturnLabel());
5163 } else {
5164 __ j(kEqual, codegen_->GetLabelOf(successor));
5165 __ jmp(slow_path->GetEntryLabel());
5166 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005167}
5168
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005169X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5170 return codegen_->GetAssembler();
5171}
5172
5173void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005174 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005175 Location source = move->GetSource();
5176 Location destination = move->GetDestination();
5177
5178 if (source.IsRegister()) {
5179 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005180 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005181 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005182 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005183 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005184 } else {
5185 DCHECK(destination.IsDoubleStackSlot());
5186 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005187 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005188 }
5189 } else if (source.IsStackSlot()) {
5190 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005191 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005192 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005193 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005194 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005195 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005196 } else {
5197 DCHECK(destination.IsStackSlot());
5198 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5199 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5200 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005201 } else if (source.IsDoubleStackSlot()) {
5202 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005203 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005204 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005205 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005206 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5207 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005208 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005209 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005210 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5211 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5212 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005213 } else if (source.IsConstant()) {
5214 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005215 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5216 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005217 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005218 if (value == 0) {
5219 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
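          // xorl reg, reg is the idiomatic way to zero a register: the encoding is
          // shorter than movl reg, 0 and the CPU treats it as a dependency-breaking
          // zeroing idiom. Writing the 32-bit register also clears the upper 32 bits.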
5220 } else {
5221 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5222 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005223 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005224 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005225 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005226 }
5227 } else if (constant->IsLongConstant()) {
5228 int64_t value = constant->AsLongConstant()->GetValue();
5229 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005230 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005231 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005232 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005233 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005234 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005235 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005236 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005237 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005238 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005239 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005240 } else {
5241 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005242 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005243 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5244 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005245 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005246 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005247 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005248 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005249 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005250 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005251 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005252 } else {
5253 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005254 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005255 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005256 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005257 } else if (source.IsFpuRegister()) {
5258 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005259 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005260 } else if (destination.IsStackSlot()) {
5261 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005262 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005263 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005264 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005265 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005266 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005267 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005268 }
5269}
5270
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005271void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005272 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005273 __ movl(Address(CpuRegister(RSP), mem), reg);
5274 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005275}
5276
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005277void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005278 ScratchRegisterScope ensure_scratch(
5279 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5280
5281 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
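  // A memory-to-memory swap needs two scratch registers: TMP is always available,
  // and the scope above provides a second one, spilling RAX with a push if no other
  // register is free; in that case every stack-slot offset below is shifted by the
  // word just pushed.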
5282 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5283 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5284 Address(CpuRegister(RSP), mem2 + stack_offset));
5285 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5286 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5287 CpuRegister(ensure_scratch.GetRegister()));
5288}
5289
Mark Mendell8a1c7282015-06-29 15:41:28 -04005290void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5291 __ movq(CpuRegister(TMP), reg1);
5292 __ movq(reg1, reg2);
5293 __ movq(reg2, CpuRegister(TMP));
5294}
5295
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005296void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5297 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5298 __ movq(Address(CpuRegister(RSP), mem), reg);
5299 __ movq(reg, CpuRegister(TMP));
5300}
5301
5302void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5303 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005304 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005305
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005306 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5307 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5308 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5309 Address(CpuRegister(RSP), mem2 + stack_offset));
5310 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5311 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5312 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005313}
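// Note on the two memory-to-memory exchanges above (Exchange32/Exchange64 taking two
// stack offsets): one slot is read into TMP and the other into a scratch core register
// obtained from ScratchRegisterScope. If no core register was free and the scratch had
// to be spilled (pushq in SpillScratch()), RSP has moved down by one word, so both
// offsets are rebased by kX86_64WordSize for the duration of the swap.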
5314
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005315void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5316 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5317 __ movss(Address(CpuRegister(RSP), mem), reg);
5318 __ movd(reg, CpuRegister(TMP));
5319}
5320
5321void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5322 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5323 __ movsd(Address(CpuRegister(RSP), mem), reg);
5324 __ movd(reg, CpuRegister(TMP));
5325}
5326
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005327void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005328 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005329 Location source = move->GetSource();
5330 Location destination = move->GetDestination();
5331
5332 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005333 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005334 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005335 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005336 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005337 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005338 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005339 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5340 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005341 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005342 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005343 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005344 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5345 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005346 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005347 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5348 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5349 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005350 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005351 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005352 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005353 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005354 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005355 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005356 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005357 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005358 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005359 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005360 }
5361}
5362
5363
5364void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5365 __ pushq(CpuRegister(reg));
5366}
5367
5368
5369void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5370 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005371}
5372
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005373void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005374 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005375 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5376 Immediate(mirror::Class::kStatusInitialized));
5377 __ j(kLess, slow_path->GetEntryLabel());
5378 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005379 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005380}
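// Illustrative shape of the sequence emitted above (pseudo-assembly; the class register
// is chosen by the register allocator):
//
//   cmpl [class_reg + mirror::Class::StatusOffset()], kStatusInitialized
//   jl   <LoadClassSlowPathX86_64 entry>   // taken when status < kStatusInitialized
//  slow_path_exit:
//
// The slow path initializes the class through the runtime and jumps back to the exit
// label bound above; as noted, no fence is needed afterwards thanks to the x86-64
// memory model.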
5381
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005382HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5383 HLoadClass::LoadKind desired_class_load_kind) {
5384 if (kEmitCompilerReadBarrier) {
5385 switch (desired_class_load_kind) {
5386 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5387 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5388 case HLoadClass::LoadKind::kBootImageAddress:
5389 // TODO: Implement for read barrier.
5390 return HLoadClass::LoadKind::kDexCacheViaMethod;
5391 default:
5392 break;
5393 }
5394 }
5395 switch (desired_class_load_kind) {
5396 case HLoadClass::LoadKind::kReferrersClass:
5397 break;
5398 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5399 DCHECK(!GetCompilerOptions().GetCompilePic());
5400 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5401 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5402 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5403 DCHECK(GetCompilerOptions().GetCompilePic());
5404 break;
5405 case HLoadClass::LoadKind::kBootImageAddress:
5406 break;
5407 case HLoadClass::LoadKind::kDexCacheAddress:
5408 DCHECK(Runtime::Current()->UseJitCompilation());
5409 break;
5410 case HLoadClass::LoadKind::kDexCachePcRelative:
5411 DCHECK(!Runtime::Current()->UseJitCompilation());
5412 break;
5413 case HLoadClass::LoadKind::kDexCacheViaMethod:
5414 break;
5415 }
5416 return desired_class_load_kind;
5417}
5418
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005419void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005420 if (cls->NeedsAccessCheck()) {
5421 InvokeRuntimeCallingConvention calling_convention;
5422 CodeGenerator::CreateLoadClassLocationSummary(
5423 cls,
5424 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5425 Location::RegisterLocation(RAX),
5426 /* code_generator_supports_read_barrier */ true);
5427 return;
5428 }
5429
5430 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
5431 ? LocationSummary::kCallOnSlowPath
5432 : LocationSummary::kNoCall;
5433 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
5434 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5435 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5436 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5437 locations->SetInAt(0, Location::RequiresRegister());
5438 }
5439 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005440}
5441
5442void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005443 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005444 if (cls->NeedsAccessCheck()) {
5445 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5446 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5447 cls,
5448 cls->GetDexPc(),
5449 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005450 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005451 return;
5452 }
5453
Roland Levillain0d5a2812015-11-13 10:07:31 +00005454 Location out_loc = locations->Out();
5455 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005456
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005457 bool generate_null_check = false;
5458 switch (cls->GetLoadKind()) {
5459 case HLoadClass::LoadKind::kReferrersClass: {
5460 DCHECK(!cls->CanCallRuntime());
5461 DCHECK(!cls->MustGenerateClinitCheck());
5462 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5463 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5464 GenerateGcRootFieldLoad(
5465 cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5466 break;
5467 }
5468 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5469 DCHECK(!kEmitCompilerReadBarrier);
5470 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5471 codegen_->RecordTypePatch(cls);
5472 break;
5473 case HLoadClass::LoadKind::kBootImageAddress: {
5474 DCHECK(!kEmitCompilerReadBarrier);
5475 DCHECK_NE(cls->GetAddress(), 0u);
5476 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
5477 __ movl(out, Immediate(address)); // Zero-extended.
5478 codegen_->RecordSimplePatch();
5479 break;
5480 }
5481 case HLoadClass::LoadKind::kDexCacheAddress: {
5482 DCHECK_NE(cls->GetAddress(), 0u);
5483 // /* GcRoot<mirror::Class> */ out = *address
5484 if (IsUint<32>(cls->GetAddress())) {
5485 Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
5486 GenerateGcRootFieldLoad(cls, out_loc, address);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005487 } else {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005488 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5489 __ movq(out, Immediate(cls->GetAddress()));
5490 GenerateGcRootFieldLoad(cls, out_loc, Address(out, 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005491 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005492 generate_null_check = !cls->IsInDexCache();
5493 break;
5494 }
5495 case HLoadClass::LoadKind::kDexCachePcRelative: {
5496 uint32_t offset = cls->GetDexCacheElementOffset();
5497 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
5498 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5499 /* no_rip */ false);
5500 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
5501 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label);
5502 generate_null_check = !cls->IsInDexCache();
5503 break;
5504 }
5505 case HLoadClass::LoadKind::kDexCacheViaMethod: {
5506 // /* GcRoot<mirror::Class>[] */ out =
5507 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5508 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5509 __ movq(out,
5510 Address(current_method,
5511 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
5512 // /* GcRoot<mirror::Class> */ out = out[type_index]
5513 GenerateGcRootFieldLoad(
5514 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
5515 generate_null_check = !cls->IsInDexCache();
5516 break;
5517 }
5518 default:
5519 LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
5520 UNREACHABLE();
5521 }
5522
5523 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5524 DCHECK(cls->CanCallRuntime());
5525 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5526 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5527 codegen_->AddSlowPath(slow_path);
5528 if (generate_null_check) {
5529 __ testl(out, out);
5530 __ j(kEqual, slow_path->GetEntryLabel());
5531 }
5532 if (cls->MustGenerateClinitCheck()) {
5533 GenerateClassInitializationCheck(slow_path, out);
5534 } else {
5535 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005536 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005537 }
5538}
5539
5540void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5541 LocationSummary* locations =
5542 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5543 locations->SetInAt(0, Location::RequiresRegister());
5544 if (check->HasUses()) {
5545 locations->SetOut(Location::SameAsFirstInput());
5546 }
5547}
5548
5549void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005550  // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005551 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005552 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005553 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005554 GenerateClassInitializationCheck(slow_path,
5555 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005556}
5557
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005558HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5559 HLoadString::LoadKind desired_string_load_kind) {
5560 if (kEmitCompilerReadBarrier) {
5561 switch (desired_string_load_kind) {
5562 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5563 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5564 case HLoadString::LoadKind::kBootImageAddress:
5565 // TODO: Implement for read barrier.
5566 return HLoadString::LoadKind::kDexCacheViaMethod;
5567 default:
5568 break;
5569 }
5570 }
5571 switch (desired_string_load_kind) {
5572 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5573 DCHECK(!GetCompilerOptions().GetCompilePic());
5574 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5575 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5576 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5577 DCHECK(GetCompilerOptions().GetCompilePic());
5578 break;
5579 case HLoadString::LoadKind::kBootImageAddress:
5580 break;
5581 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005582 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005583 break;
5584 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005585 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005586 break;
5587 case HLoadString::LoadKind::kDexCacheViaMethod:
5588 break;
5589 }
5590 return desired_string_load_kind;
5591}
5592
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005593void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005594 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005595 ? LocationSummary::kCallOnSlowPath
5596 : LocationSummary::kNoCall;
5597 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005598 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5599 locations->SetInAt(0, Location::RequiresRegister());
5600 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005601 locations->SetOut(Location::RequiresRegister());
5602}
5603
5604void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005605 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005606 Location out_loc = locations->Out();
5607 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005608
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005609 switch (load->GetLoadKind()) {
5610 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
5611 DCHECK(!kEmitCompilerReadBarrier);
5612 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5613 codegen_->RecordStringPatch(load);
5614 return; // No dex cache slow path.
5615 }
5616 case HLoadString::LoadKind::kBootImageAddress: {
5617 DCHECK(!kEmitCompilerReadBarrier);
5618 DCHECK_NE(load->GetAddress(), 0u);
5619 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5620 __ movl(out, Immediate(address)); // Zero-extended.
5621 codegen_->RecordSimplePatch();
5622 return; // No dex cache slow path.
5623 }
5624 case HLoadString::LoadKind::kDexCacheAddress: {
5625 DCHECK_NE(load->GetAddress(), 0u);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005626 // /* GcRoot<mirror::String> */ out = *address
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005627 if (IsUint<32>(load->GetAddress())) {
5628 Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
5629 GenerateGcRootFieldLoad(load, out_loc, address);
5630 } else {
5631 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5632 __ movq(out, Immediate(load->GetAddress()));
5633 GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
5634 }
5635 break;
5636 }
5637 case HLoadString::LoadKind::kDexCachePcRelative: {
5638 uint32_t offset = load->GetDexCacheElementOffset();
5639 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
5640 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5641 /* no_rip */ false);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005642 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005643 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
5644 break;
5645 }
5646 case HLoadString::LoadKind::kDexCacheViaMethod: {
5647 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5648
5649 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5650 GenerateGcRootFieldLoad(
5651 load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5652 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5653 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
5654 // /* GcRoot<mirror::String> */ out = out[string_index]
5655 GenerateGcRootFieldLoad(
5656 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
5657 break;
5658 }
5659 default:
5660 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
5661 UNREACHABLE();
5662 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005663
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005664 if (!load->IsInDexCache()) {
5665 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5666 codegen_->AddSlowPath(slow_path);
5667 __ testl(out, out);
5668 __ j(kEqual, slow_path->GetEntryLabel());
5669 __ Bind(slow_path->GetExitLabel());
5670 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005671}
5672
David Brazdilcb1c0552015-08-04 16:22:25 +01005673static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005674 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5675 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005676}
5677
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005678void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5679 LocationSummary* locations =
5680 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5681 locations->SetOut(Location::RequiresRegister());
5682}
5683
5684void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005685 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5686}
5687
5688void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5689 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5690}
5691
5692void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5693 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005694}
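// Both visitors above rely on the GS segment override: on x86-64 the current Thread is
// reachable through GS, so the pending exception (a 32-bit heap reference, hence movl)
// is read or cleared with a single gs-prefixed access at Thread::ExceptionOffset(),
// roughly:
//
//   movl out, gs:[exception_offset]    // VisitLoadException
//   movl gs:[exception_offset], 0      // VisitClearException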
5695
5696void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5697 LocationSummary* locations =
5698 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5699 InvokeRuntimeCallingConvention calling_convention;
5700 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5701}
5702
5703void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005704 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5705 instruction,
5706 instruction->GetDexPc(),
5707 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005708 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005709}
5710
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005711static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5712 return kEmitCompilerReadBarrier &&
5713 (kUseBakerReadBarrier ||
5714 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5715 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5716 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5717}
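// A temporary is only needed when read barriers are enabled: with Baker read barriers
// every reference load in the type checks below takes a temp register, and with
// slow-path read barriers the loop-based checks (abstract class, class hierarchy,
// array object) need one to preserve the previous reference for
// GenerateReadBarrierSlow (see GenerateReferenceLoadOneRegister further down).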
5718
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005719void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005720 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005721 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5722 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005723 case TypeCheckKind::kExactCheck:
5724 case TypeCheckKind::kAbstractClassCheck:
5725 case TypeCheckKind::kClassHierarchyCheck:
5726 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005727 call_kind =
5728 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005729 break;
5730 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005731 case TypeCheckKind::kUnresolvedCheck:
5732 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005733 call_kind = LocationSummary::kCallOnSlowPath;
5734 break;
5735 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005736
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005737 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005738 locations->SetInAt(0, Location::RequiresRegister());
5739 locations->SetInAt(1, Location::Any());
5740 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5741 locations->SetOut(Location::RequiresRegister());
5742 // When read barriers are enabled, we need a temporary register for
5743 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005744 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005745 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005746 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005747}
5748
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005749void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005750 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005751 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005752 Location obj_loc = locations->InAt(0);
5753 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005754 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005755 Location out_loc = locations->Out();
5756 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005757 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005758 locations->GetTemp(0) :
5759 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005760 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005761 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5762 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5763 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005764 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005765 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005766
5767 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005768 // Avoid null check if we know obj is not null.
5769 if (instruction->MustDoNullCheck()) {
5770 __ testl(obj, obj);
5771 __ j(kEqual, &zero);
5772 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005773
Roland Levillain0d5a2812015-11-13 10:07:31 +00005774 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005775 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005776
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005777 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005778 case TypeCheckKind::kExactCheck: {
5779 if (cls.IsRegister()) {
5780 __ cmpl(out, cls.AsRegister<CpuRegister>());
5781 } else {
5782 DCHECK(cls.IsStackSlot()) << cls;
5783 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5784 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005785 if (zero.IsLinked()) {
5786 // Classes must be equal for the instanceof to succeed.
5787 __ j(kNotEqual, &zero);
5788 __ movl(out, Immediate(1));
5789 __ jmp(&done);
5790 } else {
5791 __ setcc(kEqual, out);
5792 // setcc only sets the low byte.
5793 __ andl(out, Immediate(1));
5794 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005795 break;
5796 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005797
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005798 case TypeCheckKind::kAbstractClassCheck: {
5799 // If the class is abstract, we eagerly fetch the super class of the
5800 // object to avoid doing a comparison we know will fail.
5801 NearLabel loop, success;
5802 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005803 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005804 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005805 __ testl(out, out);
5806 // If `out` is null, we use it for the result, and jump to `done`.
5807 __ j(kEqual, &done);
5808 if (cls.IsRegister()) {
5809 __ cmpl(out, cls.AsRegister<CpuRegister>());
5810 } else {
5811 DCHECK(cls.IsStackSlot()) << cls;
5812 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5813 }
5814 __ j(kNotEqual, &loop);
5815 __ movl(out, Immediate(1));
5816 if (zero.IsLinked()) {
5817 __ jmp(&done);
5818 }
5819 break;
5820 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005821
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005822 case TypeCheckKind::kClassHierarchyCheck: {
5823 // Walk over the class hierarchy to find a match.
5824 NearLabel loop, success;
5825 __ Bind(&loop);
5826 if (cls.IsRegister()) {
5827 __ cmpl(out, cls.AsRegister<CpuRegister>());
5828 } else {
5829 DCHECK(cls.IsStackSlot()) << cls;
5830 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5831 }
5832 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005833 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005834 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005835 __ testl(out, out);
5836 __ j(kNotEqual, &loop);
5837 // If `out` is null, we use it for the result, and jump to `done`.
5838 __ jmp(&done);
5839 __ Bind(&success);
5840 __ movl(out, Immediate(1));
5841 if (zero.IsLinked()) {
5842 __ jmp(&done);
5843 }
5844 break;
5845 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005846
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005847 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005848 // Do an exact check.
5849 NearLabel exact_check;
5850 if (cls.IsRegister()) {
5851 __ cmpl(out, cls.AsRegister<CpuRegister>());
5852 } else {
5853 DCHECK(cls.IsStackSlot()) << cls;
5854 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5855 }
5856 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005857 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005858 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005859 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005860 __ testl(out, out);
5861 // If `out` is null, we use it for the result, and jump to `done`.
5862 __ j(kEqual, &done);
5863 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5864 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005865 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005866 __ movl(out, Immediate(1));
5867 __ jmp(&done);
5868 break;
5869 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005870
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005871 case TypeCheckKind::kArrayCheck: {
5872 if (cls.IsRegister()) {
5873 __ cmpl(out, cls.AsRegister<CpuRegister>());
5874 } else {
5875 DCHECK(cls.IsStackSlot()) << cls;
5876 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5877 }
5878 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005879 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5880 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005881 codegen_->AddSlowPath(slow_path);
5882 __ j(kNotEqual, slow_path->GetEntryLabel());
5883 __ movl(out, Immediate(1));
5884 if (zero.IsLinked()) {
5885 __ jmp(&done);
5886 }
5887 break;
5888 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005889
Calin Juravle98893e12015-10-02 21:05:03 +01005890 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005891 case TypeCheckKind::kInterfaceCheck: {
5892 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005893 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005894 // cases.
5895 //
5896 // We cannot directly call the InstanceofNonTrivial runtime
5897 // entry point without resorting to a type checking slow path
5898 // here (i.e. by calling InvokeRuntime directly), as it would
5899 // require to assign fixed registers for the inputs of this
5900 // HInstanceOf instruction (following the runtime calling
5901 // convention), which might be cluttered by the potential first
5902 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005903 //
5904 // TODO: Introduce a new runtime entry point taking the object
5905 // to test (instead of its class) as argument, and let it deal
5906 // with the read barrier issues. This will let us refactor this
5907 // case of the `switch` code as it was previously (with a direct
5908 // call to the runtime not using a type checking slow path).
5909 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005910 DCHECK(locations->OnlyCallsOnSlowPath());
5911 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5912 /* is_fatal */ false);
5913 codegen_->AddSlowPath(slow_path);
5914 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005915 if (zero.IsLinked()) {
5916 __ jmp(&done);
5917 }
5918 break;
5919 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005920 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005921
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005922 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005923 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005924 __ xorl(out, out);
5925 }
5926
5927 if (done.IsLinked()) {
5928 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005929 }
5930
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005931 if (slow_path != nullptr) {
5932 __ Bind(slow_path->GetExitLabel());
5933 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005934}
5935
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005936void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005937 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5938 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005939 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5940 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005941 case TypeCheckKind::kExactCheck:
5942 case TypeCheckKind::kAbstractClassCheck:
5943 case TypeCheckKind::kClassHierarchyCheck:
5944 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005945 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5946 LocationSummary::kCallOnSlowPath :
5947 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005948 break;
5949 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005950 case TypeCheckKind::kUnresolvedCheck:
5951 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005952 call_kind = LocationSummary::kCallOnSlowPath;
5953 break;
5954 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005955 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5956 locations->SetInAt(0, Location::RequiresRegister());
5957 locations->SetInAt(1, Location::Any());
5958 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5959 locations->AddTemp(Location::RequiresRegister());
5960 // When read barriers are enabled, we need an additional temporary
5961 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005962 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005963 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005964 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005965}
5966
5967void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005968 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005969 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005970 Location obj_loc = locations->InAt(0);
5971 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005972 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005973 Location temp_loc = locations->GetTemp(0);
5974 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005975 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005976 locations->GetTemp(1) :
5977 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005978 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5979 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5980 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5981 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005982
Roland Levillain0d5a2812015-11-13 10:07:31 +00005983 bool is_type_check_slow_path_fatal =
5984 (type_check_kind == TypeCheckKind::kExactCheck ||
5985 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5986 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5987 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5988 !instruction->CanThrowIntoCatchBlock();
5989 SlowPathCode* type_check_slow_path =
5990 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5991 is_type_check_slow_path_fatal);
5992 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005993
Roland Levillain0d5a2812015-11-13 10:07:31 +00005994 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005995 case TypeCheckKind::kExactCheck:
5996 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005997 NearLabel done;
5998 // Avoid null check if we know obj is not null.
5999 if (instruction->MustDoNullCheck()) {
6000 __ testl(obj, obj);
6001 __ j(kEqual, &done);
6002 }
6003
6004 // /* HeapReference<Class> */ temp = obj->klass_
6005 GenerateReferenceLoadTwoRegisters(
6006 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6007
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006008 if (cls.IsRegister()) {
6009 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6010 } else {
6011 DCHECK(cls.IsStackSlot()) << cls;
6012 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6013 }
6014 // Jump to slow path for throwing the exception or doing a
6015 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006016 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006017 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006018 break;
6019 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006020
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006021 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006022 NearLabel done;
6023 // Avoid null check if we know obj is not null.
6024 if (instruction->MustDoNullCheck()) {
6025 __ testl(obj, obj);
6026 __ j(kEqual, &done);
6027 }
6028
6029 // /* HeapReference<Class> */ temp = obj->klass_
6030 GenerateReferenceLoadTwoRegisters(
6031 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6032
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006033 // If the class is abstract, we eagerly fetch the super class of the
6034 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006035 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006036 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006037 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006038 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006039
6040 // If the class reference currently in `temp` is not null, jump
6041 // to the `compare_classes` label to compare it with the checked
6042 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006043 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006044 __ j(kNotEqual, &compare_classes);
6045 // Otherwise, jump to the slow path to throw the exception.
6046 //
6047 // But before, move back the object's class into `temp` before
6048 // going into the slow path, as it has been overwritten in the
6049 // meantime.
6050 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006051 GenerateReferenceLoadTwoRegisters(
6052 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006053 __ jmp(type_check_slow_path->GetEntryLabel());
6054
6055 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006056 if (cls.IsRegister()) {
6057 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6058 } else {
6059 DCHECK(cls.IsStackSlot()) << cls;
6060 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6061 }
6062 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00006063 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006064 break;
6065 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006066
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006067 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006068 NearLabel done;
6069 // Avoid null check if we know obj is not null.
6070 if (instruction->MustDoNullCheck()) {
6071 __ testl(obj, obj);
6072 __ j(kEqual, &done);
6073 }
6074
6075 // /* HeapReference<Class> */ temp = obj->klass_
6076 GenerateReferenceLoadTwoRegisters(
6077 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6078
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006079 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006080 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006081 __ Bind(&loop);
6082 if (cls.IsRegister()) {
6083 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6084 } else {
6085 DCHECK(cls.IsStackSlot()) << cls;
6086 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6087 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006088 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006089
Roland Levillain0d5a2812015-11-13 10:07:31 +00006090 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006091 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006092
6093 // If the class reference currently in `temp` is not null, jump
6094 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006095 __ testl(temp, temp);
6096 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006097 // Otherwise, jump to the slow path to throw the exception.
6098 //
6099 // But before, move back the object's class into `temp` before
6100 // going into the slow path, as it has been overwritten in the
6101 // meantime.
6102 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006103 GenerateReferenceLoadTwoRegisters(
6104 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006105 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006106 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006107 break;
6108 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006109
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006110 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006111 // We cannot use a NearLabel here, as its range might be too
6112 // short in some cases when read barriers are enabled. This has
6113 // been observed for instance when the code emitted for this
6114 // case uses high x86-64 registers (R8-R15).
6115 Label done;
6116 // Avoid null check if we know obj is not null.
6117 if (instruction->MustDoNullCheck()) {
6118 __ testl(obj, obj);
6119 __ j(kEqual, &done);
6120 }
6121
6122 // /* HeapReference<Class> */ temp = obj->klass_
6123 GenerateReferenceLoadTwoRegisters(
6124 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6125
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006126 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006127 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006128 if (cls.IsRegister()) {
6129 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6130 } else {
6131 DCHECK(cls.IsStackSlot()) << cls;
6132 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6133 }
6134 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006135
6136 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006137 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006138 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006139
6140 // If the component type is not null (i.e. the object is indeed
6141 // an array), jump to label `check_non_primitive_component_type`
6142 // to further check that this component type is not a primitive
6143 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006144 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006145 __ j(kNotEqual, &check_non_primitive_component_type);
6146 // Otherwise, jump to the slow path to throw the exception.
6147 //
6148 // But before, move back the object's class into `temp` before
6149 // going into the slow path, as it has been overwritten in the
6150 // meantime.
6151 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006152 GenerateReferenceLoadTwoRegisters(
6153 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006154 __ jmp(type_check_slow_path->GetEntryLabel());
6155
6156 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006157 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00006158 __ j(kEqual, &done);
6159 // Same comment as above regarding `temp` and the slow path.
6160 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006161 GenerateReferenceLoadTwoRegisters(
6162 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006163 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006164 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006165 break;
6166 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006167
Calin Juravle98893e12015-10-02 21:05:03 +01006168 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006169 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00006170 NearLabel done;
6171 // Avoid null check if we know obj is not null.
6172 if (instruction->MustDoNullCheck()) {
6173 __ testl(obj, obj);
6174 __ j(kEqual, &done);
6175 }
6176
6177 // /* HeapReference<Class> */ temp = obj->klass_
6178 GenerateReferenceLoadTwoRegisters(
6179 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6180
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006181 // We always go into the type check slow path for the unresolved
6182 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006183 //
6184 // We cannot directly call the CheckCast runtime entry point
6185 // without resorting to a type checking slow path here (i.e. by
6186 // calling InvokeRuntime directly), as it would require to
6187 // assign fixed registers for the inputs of this HInstanceOf
6188 // instruction (following the runtime calling convention), which
6189 // might be cluttered by the potential first read barrier
6190 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006191 //
6192 // TODO: Introduce a new runtime entry point taking the object
6193 // to test (instead of its class) as argument, and let it deal
6194 // with the read barrier issues. This will let us refactor this
6195 // case of the `switch` code as it was previously (with a direct
6196 // call to the runtime not using a type checking slow path).
6197 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006198 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006199 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006200 break;
6201 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006202
Roland Levillain0d5a2812015-11-13 10:07:31 +00006203 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006204}
6205
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006206void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6207 LocationSummary* locations =
6208 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
6209 InvokeRuntimeCallingConvention calling_convention;
6210 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6211}
6212
6213void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01006214 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
6215 : QUICK_ENTRY_POINT(pUnlockObject),
6216 instruction,
6217 instruction->GetDexPc(),
6218 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006219 if (instruction->IsEnter()) {
6220 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6221 } else {
6222 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6223 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006224}
6225
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006226void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6227void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6228void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6229
6230void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6231 LocationSummary* locations =
6232 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6233 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6234 || instruction->GetResultType() == Primitive::kPrimLong);
6235 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006236 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006237 locations->SetOut(Location::SameAsFirstInput());
6238}
6239
6240void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6241 HandleBitwiseOperation(instruction);
6242}
6243
6244void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6245 HandleBitwiseOperation(instruction);
6246}
6247
6248void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6249 HandleBitwiseOperation(instruction);
6250}
6251
6252void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6253 LocationSummary* locations = instruction->GetLocations();
6254 Location first = locations->InAt(0);
6255 Location second = locations->InAt(1);
6256 DCHECK(first.Equals(locations->Out()));
6257
6258 if (instruction->GetResultType() == Primitive::kPrimInt) {
6259 if (second.IsRegister()) {
6260 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006261 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006262 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006263 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006264 } else {
6265 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006266 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006267 }
6268 } else if (second.IsConstant()) {
6269 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6270 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006271 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006272 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006273 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006274 } else {
6275 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006276 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006277 }
6278 } else {
6279 Address address(CpuRegister(RSP), second.GetStackIndex());
6280 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006281 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006282 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006283 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006284 } else {
6285 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006286 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006287 }
6288 }
6289 } else {
6290 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006291 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6292 bool second_is_constant = false;
6293 int64_t value = 0;
6294 if (second.IsConstant()) {
6295 second_is_constant = true;
6296 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006297 }
Mark Mendell40741f32015-04-20 22:10:34 -04006298 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006299
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006300 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006301 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006302 if (is_int32_value) {
6303 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6304 } else {
6305 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6306 }
6307 } else if (second.IsDoubleStackSlot()) {
6308 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006309 } else {
6310 __ andq(first_reg, second.AsRegister<CpuRegister>());
6311 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006312 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006313 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006314 if (is_int32_value) {
6315 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6316 } else {
6317 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6318 }
6319 } else if (second.IsDoubleStackSlot()) {
6320 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006321 } else {
6322 __ orq(first_reg, second.AsRegister<CpuRegister>());
6323 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006324 } else {
6325 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006326 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006327 if (is_int32_value) {
6328 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6329 } else {
6330 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6331 }
6332 } else if (second.IsDoubleStackSlot()) {
6333 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006334 } else {
6335 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6336 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006337 }
6338 }
6339}
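// Note on the long path above: constants representable as a sign-extended
// 32-bit immediate are encoded inline (e.g. a mask like 0x7f), while larger
// ones such as 0x123456789 are materialized in the RIP-addressed constant
// area and read via LiteralInt64Address; the example values are illustrative.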
6340
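// The "one register" form below uses `out` as both the base object and the
// destination, so the plain load overwrites the reference it dereferences;
// `maybe_temp` exists to preserve that original reference for the non-Baker
// read barrier slow path. The "two registers" form further down keeps the
// source object in a separate `obj` register and has no such need.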
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006341void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6342 Location out,
6343 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006344 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006345 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6346 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006347 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006348 if (kUseBakerReadBarrier) {
6349 // Load with fast path based Baker's read barrier.
6350 // /* HeapReference<Object> */ out = *(out + offset)
6351 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006352 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006353 } else {
6354 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006355 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006356 // in the following move operation, as we will need it for the
6357 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006358 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006359 // /* HeapReference<Object> */ out = *(out + offset)
6360 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006361 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006362 }
6363 } else {
6364 // Plain load with no read barrier.
6365 // /* HeapReference<Object> */ out = *(out + offset)
6366 __ movl(out_reg, Address(out_reg, offset));
6367 __ MaybeUnpoisonHeapReference(out_reg);
6368 }
6369}
6370
6371void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6372 Location out,
6373 Location obj,
6374 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006375 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006376 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6377 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6378 if (kEmitCompilerReadBarrier) {
6379 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006380 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006381 // Load with fast path based Baker's read barrier.
6382 // /* HeapReference<Object> */ out = *(obj + offset)
6383 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006384 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006385 } else {
6386 // Load with slow path based read barrier.
6387 // /* HeapReference<Object> */ out = *(obj + offset)
6388 __ movl(out_reg, Address(obj_reg, offset));
6389 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6390 }
6391 } else {
6392 // Plain load with no read barrier.
6393 // /* HeapReference<Object> */ out = *(obj + offset)
6394 __ movl(out_reg, Address(obj_reg, offset));
6395 __ MaybeUnpoisonHeapReference(out_reg);
6396 }
6397}
6398
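// When `fixup_label` is non-null it is bound immediately after the
// instruction that references `address`; callers appear to use this to record
// a location for later PC-relative patching, though that purpose is inferred
// here rather than stated in this function.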
6399void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6400 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006401 const Address& address,
6402 Label* fixup_label) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006403 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6404 if (kEmitCompilerReadBarrier) {
6405 if (kUseBakerReadBarrier) {
6406 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6407      // Baker's read barriers are used:
6408 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006409 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006410 // if (Thread::Current()->GetIsGcMarking()) {
6411 // root = ReadBarrier::Mark(root)
6412 // }
6413
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006414 // /* GcRoot<mirror::Object> */ root = *address
6415 __ movl(root_reg, address);
6416 if (fixup_label != nullptr) {
6417 __ Bind(fixup_label);
6418 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006419 static_assert(
6420 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6421 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6422 "have different sizes.");
6423 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6424 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6425 "have different sizes.");
6426
6427 // Slow path used to mark the GC root `root`.
6428 SlowPathCode* slow_path =
6429 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
6430 codegen_->AddSlowPath(slow_path);
6431
6432 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6433 /* no_rip */ true),
6434 Immediate(0));
6435 __ j(kNotEqual, slow_path->GetEntryLabel());
6436 __ Bind(slow_path->GetExitLabel());
6437 } else {
6438 // GC root loaded through a slow path for read barriers other
6439 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006440 // /* GcRoot<mirror::Object>* */ root = address
6441 __ leaq(root_reg, address);
6442 if (fixup_label != nullptr) {
6443 __ Bind(fixup_label);
6444 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006445 // /* mirror::Object* */ root = root->Read()
6446 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6447 }
6448 } else {
6449 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006450 // /* GcRoot<mirror::Object> */ root = *address
6451 __ movl(root_reg, address);
6452 if (fixup_label != nullptr) {
6453 __ Bind(fixup_label);
6454 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006455 // Note that GC roots are not affected by heap poisoning, thus we
6456 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006457 }
6458}
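// Note the asymmetry between the two Baker fast paths: the GC root load above
// tests the thread-local is-gc-marking flag, while the heap-reference loads
// below (GenerateReferenceLoadWithBakerReadBarrier) inspect the read barrier
// state bits of the object's lock word instead.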
6459
6460void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6461 Location ref,
6462 CpuRegister obj,
6463 uint32_t offset,
6464 Location temp,
6465 bool needs_null_check) {
6466 DCHECK(kEmitCompilerReadBarrier);
6467 DCHECK(kUseBakerReadBarrier);
6468
6469 // /* HeapReference<Object> */ ref = *(obj + offset)
6470 Address src(obj, offset);
6471 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6472}
6473
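// For the array load below, a constant index is folded into the displacement
// (e.g. index 3 with data_offset 12 addresses obj + 3 * 4 + 12), while a
// register index uses scaled-index addressing; the numbers are illustrative.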
6474void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6475 Location ref,
6476 CpuRegister obj,
6477 uint32_t data_offset,
6478 Location index,
6479 Location temp,
6480 bool needs_null_check) {
6481 DCHECK(kEmitCompilerReadBarrier);
6482 DCHECK(kUseBakerReadBarrier);
6483
Roland Levillain3d312422016-06-23 13:53:42 +01006484 static_assert(
6485 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6486 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006487 // /* HeapReference<Object> */ ref =
6488 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6489 Address src = index.IsConstant() ?
6490 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6491 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6492 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6493}
6494
6495void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6496 Location ref,
6497 CpuRegister obj,
6498 const Address& src,
6499 Location temp,
6500 bool needs_null_check) {
6501 DCHECK(kEmitCompilerReadBarrier);
6502 DCHECK(kUseBakerReadBarrier);
6503
6504 // In slow path based read barriers, the read barrier call is
6505 // inserted after the original load. However, in fast path based
6506 // Baker's read barriers, we need to perform the load of
6507 // mirror::Object::monitor_ *before* the original reference load.
6508 // This load-load ordering is required by the read barrier.
6509 // The fast path/slow path (for Baker's algorithm) should look like:
6510 //
6511   //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
6512 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6513 // HeapReference<Object> ref = *src; // Original reference load.
6514 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6515 // if (is_gray) {
6516 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6517 // }
6518 //
6519 // Note: the original implementation in ReadBarrier::Barrier is
6520 // slightly more complex as:
6521 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006522 // the high-bits of rb_state, which are expected to be all zeroes
6523 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6524 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006525 // - it performs additional checks that we do not do here for
6526 // performance reasons.
6527
6528 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6529 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6530 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6531
6532 // /* int32_t */ monitor = obj->monitor_
6533 __ movl(temp_reg, Address(obj, monitor_offset));
6534 if (needs_null_check) {
6535 MaybeRecordImplicitNullCheck(instruction);
6536 }
6537 // /* LockWord */ lock_word = LockWord(monitor)
6538 static_assert(sizeof(LockWord) == sizeof(int32_t),
6539 "art::LockWord and int32_t have different sizes.");
6540 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6541 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6542 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6543 static_assert(
6544 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6545 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6546
6547 // Load fence to prevent load-load reordering.
6548 // Note that this is a no-op, thanks to the x86-64 memory model.
6549 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6550
6551 // The actual reference load.
6552 // /* HeapReference<Object> */ ref = *src
6553 __ movl(ref_reg, src);
6554
6555 // Object* ref = ref_addr->AsMirrorPtr()
6556 __ MaybeUnpoisonHeapReference(ref_reg);
6557
6558 // Slow path used to mark the object `ref` when it is gray.
6559 SlowPathCode* slow_path =
6560 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6561 AddSlowPath(slow_path);
6562
6563 // if (rb_state == ReadBarrier::gray_ptr_)
6564 // ref = ReadBarrier::Mark(ref);
6565 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6566 __ j(kEqual, slow_path->GetEntryLabel());
6567 __ Bind(slow_path->GetExitLabel());
6568}
6569
6570void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6571 Location out,
6572 Location ref,
6573 Location obj,
6574 uint32_t offset,
6575 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006576 DCHECK(kEmitCompilerReadBarrier);
6577
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006578 // Insert a slow path based read barrier *after* the reference load.
6579 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006580 // If heap poisoning is enabled, the unpoisoning of the loaded
6581 // reference will be carried out by the runtime within the slow
6582 // path.
6583 //
6584 // Note that `ref` currently does not get unpoisoned (when heap
6585 // poisoning is enabled), which is alright as the `ref` argument is
6586 // not used by the artReadBarrierSlow entry point.
6587 //
6588 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6589 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6590 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6591 AddSlowPath(slow_path);
6592
Roland Levillain0d5a2812015-11-13 10:07:31 +00006593 __ jmp(slow_path->GetEntryLabel());
6594 __ Bind(slow_path->GetExitLabel());
6595}
6596
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006597void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6598 Location out,
6599 Location ref,
6600 Location obj,
6601 uint32_t offset,
6602 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006603 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006604 // Baker's read barriers shall be handled by the fast path
6605 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6606 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006607 // If heap poisoning is enabled, unpoisoning will be taken care of
6608 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006609 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006610 } else if (kPoisonHeapReferences) {
6611 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6612 }
6613}
6614
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006615void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6616 Location out,
6617 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006618 DCHECK(kEmitCompilerReadBarrier);
6619
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006620 // Insert a slow path based read barrier *after* the GC root load.
6621 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006622 // Note that GC roots are not affected by heap poisoning, so we do
6623 // not need to do anything special for this here.
6624 SlowPathCode* slow_path =
6625 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6626 AddSlowPath(slow_path);
6627
Roland Levillain0d5a2812015-11-13 10:07:31 +00006628 __ jmp(slow_path->GetEntryLabel());
6629 __ Bind(slow_path->GetExitLabel());
6630}
6631
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006632void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006633  // Nothing to do; HBoundType should have been removed by the PrepareForRegisterAllocation pass.
Calin Juravleb1498f62015-02-16 13:13:29 +00006634 LOG(FATAL) << "Unreachable";
6635}
6636
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006637void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006638  // Nothing to do; HBoundType should have been removed by the PrepareForRegisterAllocation pass.
Calin Juravleb1498f62015-02-16 13:13:29 +00006639 LOG(FATAL) << "Unreachable";
6640}
6641
Mark Mendellfe57faa2015-09-18 09:26:15 -04006642// Simple implementation of packed switch - generate cascaded compare/jumps.
6643void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6644 LocationSummary* locations =
6645 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6646 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006647 locations->AddTemp(Location::RequiresRegister());
6648 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006649}
6650
6651void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6652 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006653 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006654 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006655 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6656 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6657 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006658 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6659
6660 // Should we generate smaller inline compare/jumps?
6661 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6662 // Figure out the correct compare values and jump conditions.
6663 // Handle the first compare/branch as a special case because it might
6664 // jump to the default case.
6665 DCHECK_GT(num_entries, 2u);
6666 Condition first_condition;
6667 uint32_t index;
6668 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6669 if (lower_bound != 0) {
6670 first_condition = kLess;
6671 __ cmpl(value_reg_in, Immediate(lower_bound));
6672 __ j(first_condition, codegen_->GetLabelOf(default_block));
6673 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6674
6675 index = 1;
6676 } else {
6677 // Handle all the compare/jumps below.
6678 first_condition = kBelow;
6679 index = 0;
6680 }
6681
6682 // Handle the rest of the compare/jumps.
6683 for (; index + 1 < num_entries; index += 2) {
6684 int32_t compare_to_value = lower_bound + index + 1;
6685 __ cmpl(value_reg_in, Immediate(compare_to_value));
6686 // Jump to successors[index] if value < case_value[index].
6687 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6688 // Jump to successors[index + 1] if value == case_value[index + 1].
6689 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6690 }
6691
6692 if (index != num_entries) {
6693 // There are an odd number of entries. Handle the last one.
6694 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00006695 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006696 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6697 }
6698
6699 // And the default for any other value.
6700 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6701 __ jmp(codegen_->GetLabelOf(default_block));
6702 }
6703 return;
6704 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006705
6706 // Remove the bias, if needed.
6707 Register value_reg_out = value_reg_in.AsRegister();
6708 if (lower_bound != 0) {
6709 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6710 value_reg_out = temp_reg.AsRegister();
6711 }
6712 CpuRegister value_reg(value_reg_out);
6713
6714 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006715 __ cmpl(value_reg, Immediate(num_entries - 1));
6716 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006717
Mark Mendell9c86b482015-09-18 13:36:07 -04006718 // We are in the range of the table.
6719 // Load the address of the jump table in the constant area.
6720 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006721
Mark Mendell9c86b482015-09-18 13:36:07 -04006722 // Load the (signed) offset from the jump table.
6723 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6724
6725 // Add the offset to the address of the table base.
6726 __ addq(temp_reg, base_reg);
6727
6728 // And jump.
6729 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006730}
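// In effect, the jump-table path above emits:
//   base = &table                  // RIP-relative; the table lives in the constant area
//   temp = (int64_t) table[value]  // sign-extended 32-bit offset, relative to &table
//   jump to base + temp
// (a sketch of the sequence generated by the code above, not literal assembly).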
6731
Aart Bikc5d47542016-01-27 17:00:35 -08006732void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6733 if (value == 0) {
6734 __ xorl(dest, dest);
6735 } else {
6736 __ movl(dest, Immediate(value));
6737 }
6738}
6739
Mark Mendell92e83bf2015-05-07 11:25:03 -04006740void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6741 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006742 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006743 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006744 } else if (IsUint<32>(value)) {
6745 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006746 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6747 } else {
6748 __ movq(dest, Immediate(value));
6749 }
6750}
6751
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006752void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6753 if (value == 0) {
6754 __ xorps(dest, dest);
6755 } else {
6756 __ movss(dest, LiteralInt32Address(value));
6757 }
6758}
6759
6760void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6761 if (value == 0) {
6762 __ xorpd(dest, dest);
6763 } else {
6764 __ movsd(dest, LiteralInt64Address(value));
6765 }
6766}
6767
6768void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6769 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6770}
6771
6772void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6773 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6774}
6775
Aart Bika19616e2016-02-01 18:57:58 -08006776void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6777 if (value == 0) {
6778 __ testl(dest, dest);
6779 } else {
6780 __ cmpl(dest, Immediate(value));
6781 }
6782}
6783
6784void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6785 if (IsInt<32>(value)) {
6786 if (value == 0) {
6787 __ testq(dest, dest);
6788 } else {
6789 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6790 }
6791 } else {
6792     // Value won't fit in a sign-extended 32-bit immediate.
6793 __ cmpq(dest, LiteralInt64Address(value));
6794 }
6795}
6796
Mark Mendellcfa410b2015-05-25 16:02:44 -04006797void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6798 DCHECK(dest.IsDoubleStackSlot());
6799 if (IsInt<32>(value)) {
6800 // Can move directly as an int32 constant.
6801 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6802 Immediate(static_cast<int32_t>(value)));
6803 } else {
6804 Load64BitValue(CpuRegister(TMP), value);
6805 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6806 }
6807}
6808
Mark Mendell9c86b482015-09-18 13:36:07 -04006809/**
6810 * Class to handle late fixup of offsets into constant area.
6811 */
6812class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6813 public:
6814 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6815 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6816
6817 protected:
6818 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6819
6820 CodeGeneratorX86_64* codegen_;
6821
6822 private:
6823 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6824 // Patch the correct offset for the instruction. We use the address of the
6825 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6826 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6827 int32_t relative_position = constant_offset - pos;
6828
6829 // Patch in the right value.
6830 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6831 }
6832
6833 // Location in constant area that the fixup refers to.
6834 size_t offset_into_constant_area_;
6835};
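// A RIPFixup is recorded while the constant area's final position is still
// unknown; once Finalize() has appended the area after the generated code,
// Process() rewrites the 4-byte RIP-relative displacement just before the
// patch position with the distance to the referenced constant.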
6836
6837/**
6838  * Class to handle late fixup of offsets to a jump table that will be created in the
6839 * constant area.
6840 */
6841class JumpTableRIPFixup : public RIPFixup {
6842 public:
6843 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6844 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6845
6846 void CreateJumpTable() {
6847 X86_64Assembler* assembler = codegen_->GetAssembler();
6848
6849 // Ensure that the reference to the jump table has the correct offset.
6850 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6851 SetOffset(offset_in_constant_table);
6852
6853 // Compute the offset from the start of the function to this jump table.
6854 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6855
6856    // Populate the jump table with the correct target offsets.
6857 int32_t num_entries = switch_instr_->GetNumEntries();
6858 HBasicBlock* block = switch_instr_->GetBlock();
6859 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6860 // The value that we want is the target offset - the position of the table.
6861 for (int32_t i = 0; i < num_entries; i++) {
6862 HBasicBlock* b = successors[i];
6863 Label* l = codegen_->GetLabelOf(b);
6864 DCHECK(l->IsBound());
6865 int32_t offset_to_block = l->Position() - current_table_offset;
6866 assembler->AppendInt32(offset_to_block);
6867 }
6868 }
6869
6870 private:
6871 const HPackedSwitch* switch_instr_;
6872};
6873
Mark Mendellf55c3e02015-03-26 21:07:46 -04006874void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6875 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006876 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006877 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6878 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006879 assembler->Align(4, 0);
6880 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006881
6882 // Populate any jump tables.
6883 for (auto jump_table : fixups_to_jump_tables_) {
6884 jump_table->CreateJumpTable();
6885 }
6886
6887 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006888 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006889 }
6890
6891 // And finish up.
6892 CodeGenerator::Finalize(allocator);
6893}
6894
Mark Mendellf55c3e02015-03-26 21:07:46 -04006895Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6896 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6897 return Address::RIP(fixup);
6898}
6899
6900Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6901 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6902 return Address::RIP(fixup);
6903}
6904
6905Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6906 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6907 return Address::RIP(fixup);
6908}
6909
6910Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6911 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6912 return Address::RIP(fixup);
6913}
6914
Andreas Gampe85b62f22015-09-09 13:15:38 -07006915// TODO: trg as memory.
6916void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6917 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006918 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006919 return;
6920 }
6921
6922 DCHECK_NE(type, Primitive::kPrimVoid);
6923
6924 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6925 if (trg.Equals(return_loc)) {
6926 return;
6927 }
6928
6929 // Let the parallel move resolver take care of all of this.
6930 HParallelMove parallel_move(GetGraph()->GetArena());
6931 parallel_move.AddMove(return_loc, trg, type, nullptr);
6932 GetMoveResolver()->EmitNativeCode(&parallel_move);
6933}
6934
Mark Mendell9c86b482015-09-18 13:36:07 -04006935Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6936 // Create a fixup to be used to create and address the jump table.
6937 JumpTableRIPFixup* table_fixup =
6938 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6939
6940 // We have to populate the jump tables.
6941 fixups_to_jump_tables_.push_back(table_fixup);
6942 return Address::RIP(table_fixup);
6943}
6944
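// Note on MoveInt64ToAddress below: when two 32-bit stores are needed, the
// implicit null check is recorded on the first store only, as that is the
// first access through the possibly-null base and the one that would fault.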
Mark Mendellea5af682015-10-22 17:35:49 -04006945void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6946 const Address& addr_high,
6947 int64_t v,
6948 HInstruction* instruction) {
6949 if (IsInt<32>(v)) {
6950 int32_t v_32 = v;
6951 __ movq(addr_low, Immediate(v_32));
6952 MaybeRecordImplicitNullCheck(instruction);
6953 } else {
6954    // Doesn't fit in a sign-extended 32-bit immediate. Store it in two 32-bit pieces.
6955 int32_t low_v = Low32Bits(v);
6956 int32_t high_v = High32Bits(v);
6957 __ movl(addr_low, Immediate(low_v));
6958 MaybeRecordImplicitNullCheck(instruction);
6959 __ movl(addr_high, Immediate(high_v));
6960 }
6961}
6962
Roland Levillain4d027112015-07-01 15:41:14 +01006963#undef __
6964
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006965} // namespace x86_64
6966} // namespace art