/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

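// Mask for the C2 condition bit (bit 10) of the x87 FPU status word; fprem sets C2 while
// the partial remainder still needs more iterations.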
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

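// Slow path that calls the runtime to throw a NullPointerException (pThrowNullPointer).
// The call never returns; live registers are only saved when the exception can be caught
// within the same method.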
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

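// Slow path that calls the runtime to throw an exception on division by zero
// (pThrowDivZero).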
class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

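// Slow path for integer/long division or remainder when the divisor is -1: dividing the
// most negative value by -1 would overflow the idiv instruction, so the result is computed
// directly (the negated dividend for a division, 0 for a remainder).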
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

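// Slow path that calls the runtime to check for a pending thread suspension request
// (pTestSuspend), then resumes either at the suspend check or at the given successor block.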
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

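// Slow path that calls the runtime to throw an out-of-bounds exception, passing the index
// and the length (pThrowArrayBounds, or pThrowStringBounds for String.charAt).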
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    x86_64_codegen->InvokeRuntime(entry_point_offset,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

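// Slow path that calls the runtime to resolve a class (pInitializeType) or to resolve it
// and run its static initializer (pInitializeStaticStorage), then moves the result to the
// expected output location.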
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

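// Slow path that calls the runtime to resolve a String from its dex string index
// (pResolveString) and moves the result to the expected output location.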
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

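// Slow path for HInstanceOf and HCheckCast: calls the runtime (pInstanceofNonTrivial or
// pCheckCast) with the checked class and the object's class as arguments. When `is_fatal_`
// is set, the runtime call never returns, so live registers are not saved or restored.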
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

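// Slow path that calls the runtime to deoptimize the method (pDeoptimize), transferring
// control back to the interpreter; the runtime call does not return here.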
class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

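// Slow path for an ArraySet of an object that needs a runtime type check: moves the array,
// index and value into the calling convention registers and calls pAputObject.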
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location obj)
      : SlowPathCode(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg = obj_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(reg, RSP);
    DCHECK(0 <= reg && reg < kNumberOfCpuRegisters) << reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI and output in RAX):
    //
    //   RDI <- obj
    //   RAX <- ReadBarrierMark(RDI)
    //   obj <- RAX
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64WordSize>(reg);
    // TODO: Do not emit a stack map for this runtime call.
    x86_64_codegen->InvokeRuntime(entry_point_offset,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    __ jmp(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT

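// Maps an HIR integer condition to the corresponding x86-64 condition code.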
inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default:      break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

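// Loads the callee ArtMethod* (or equivalent callee information) into `temp` according to
// the invoke's MethodLoadKind, and returns the location that ends up holding the callee.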
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

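// Emits a virtual call: loads the receiver's class, fetches the ArtMethod* from the vtable
// entry for the invoke's vtable index, and calls its quick-compiled entry point.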
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
  type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
  __ Bind(&type_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

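// Converts all recorded patch positions into LinkerPatch entries so the linker can fix up
// the embedded literals (method pointers, relative calls, dex cache elements, strings, types).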
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
  for (const TypePatchInfo<Label>& info : type_patches_) {
    // These are always PC-relative, see GetSupportedLoadClassKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset,
                                                             &info.dex_file,
                                                             info.label.Position(),
                                                             info.type_index));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

Calin Juravle175dc732015-08-25 15:42:32 +01001036void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1037 HInstruction* instruction,
1038 uint32_t dex_pc,
1039 SlowPathCode* slow_path) {
1040 InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
1041 instruction,
1042 dex_pc,
1043 slow_path);
1044}
1045
1046void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
Alexandre Rames8158f282015-08-07 10:26:17 +01001047 HInstruction* instruction,
1048 uint32_t dex_pc,
1049 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001050 ValidateInvokeRuntime(instruction, slow_path);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001051 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
Alexandre Rames8158f282015-08-07 10:26:17 +01001052 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames8158f282015-08-07 10:26:17 +01001053}
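// Note: on x86-64 the current Thread* lives behind the GS segment base, so the
// gs()->call above is effectively an indirect "call gs:[entry_point_offset]"
// through the entrypoint table in the Thread object; RecordPcInfo then
// associates the return PC with dex_pc so a stack map exists for the runtime call.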
1054
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001055static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001056// Use a fake return address register to mimic Quick.
1057static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
Mark Mendellfb8d2792015-03-31 22:16:59 -04001058CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001059 const X86_64InstructionSetFeatures& isa_features,
1060 const CompilerOptions& compiler_options,
1061 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +00001062 : CodeGenerator(graph,
1063 kNumberOfCpuRegisters,
1064 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001065 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001066 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1067 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001068 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001069 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1070 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001071 compiler_options,
1072 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001073 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001074 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001075 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -04001076 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001077 assembler_(graph->GetArena()),
Mark Mendellf55c3e02015-03-26 21:07:46 -04001078 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +00001079 constant_area_start_(0),
Vladimir Marko5233f932015-09-29 19:01:15 +01001080 method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1081 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0f7dca42015-11-02 14:36:43 +00001082 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001083 simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1084 string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001085 type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Mark Mendell9c86b482015-09-18 13:36:07 -04001086 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001087 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
1088}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001089
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001090InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1091 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001092 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001093 assembler_(codegen->GetAssembler()),
1094 codegen_(codegen) {}
1095
David Brazdil58282f42016-01-14 12:45:10 +00001096void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001097 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001098 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001099
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001100 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001101 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001102}
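// RSP is the stack pointer and TMP serves as a scratch register for
// memory-to-memory moves (see Move() below), so neither is ever handed to the
// register allocator.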
1103
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001104static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001105 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001106}
David Srbecky9d8606d2015-04-12 09:35:32 +01001107
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001108static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001109 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001110}
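// These helpers translate ART register numbers into DWARF register numbers so
// that the cfi() directives emitted in the prologue/epilogue describe spill
// locations to unwinders and debuggers.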
1111
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001112void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001113 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001114 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001115 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001116 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001117 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001118
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001119 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001120 __ testq(CpuRegister(RAX), Address(
1121 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001122 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001123 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001124
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001125 if (HasEmptyFrame()) {
1126 return;
1127 }
1128
Nicolas Geoffray98893962015-01-21 12:32:32 +00001129 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001130 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001131 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001132 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001133 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1134 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001135 }
1136 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001137
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001138 int adjust = GetFrameSize() - GetCoreSpillSize();
1139 __ subq(CpuRegister(RSP), Immediate(adjust));
1140 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001141 uint32_t xmm_spill_location = GetFpuSpillStart();
1142 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001143
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001144 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1145 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001146 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1147 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1148 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001149 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001150 }
1151
Mathieu Chartiere401d142015-04-22 13:56:20 -07001152 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001153 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001154}
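// Rough shape of the frame built above (sketch only; exact offsets depend on
// the callee-save registers actually allocated and on the spill/outgoing-args
// area sizes):
//   [ return address ]                        <- pushed by the caller's call
//   [ saved callee-save core registers ]         one pushq per live register
//   [ XMM spill area / spills / outgoing args ]  created by the subq on RSP
//   [ ArtMethod* ]                            <- RSP + kCurrentMethodStackOffset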
1155
1156void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001157 __ cfi().RememberState();
1158 if (!HasEmptyFrame()) {
1159 uint32_t xmm_spill_location = GetFpuSpillStart();
1160 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1161 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1162 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1163 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1164 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1165 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1166 }
1167 }
1168
1169 int adjust = GetFrameSize() - GetCoreSpillSize();
1170 __ addq(CpuRegister(RSP), Immediate(adjust));
1171 __ cfi().AdjustCFAOffset(-adjust);
1172
1173 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1174 Register reg = kCoreCalleeSaves[i];
1175 if (allocated_registers_.ContainsCoreRegister(reg)) {
1176 __ popq(CpuRegister(reg));
1177 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1178 __ cfi().Restore(DWARFReg(reg));
1179 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001180 }
1181 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001182 __ ret();
1183 __ cfi().RestoreState();
1184 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001185}
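// The epilogue mirrors the prologue in reverse order. RememberState()/
// RestoreState() bracket it so that the CFI emitted for any code following the
// ret (other blocks of the same method) still describes the live frame.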
1186
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001187void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1188 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001189}
1190
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001191void CodeGeneratorX86_64::Move(Location destination, Location source) {
1192 if (source.Equals(destination)) {
1193 return;
1194 }
1195 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001196 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001197 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001198 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001199 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001200 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001201 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001202 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1203 } else if (source.IsConstant()) {
1204 HConstant* constant = source.GetConstant();
1205 if (constant->IsLongConstant()) {
1206 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1207 } else {
1208 Load32BitValue(dest, GetInt32ValueOf(constant));
1209 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001210 } else {
1211 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001212 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001213 }
1214 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001215 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001216 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001217 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001218 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001219 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1220 } else if (source.IsConstant()) {
1221 HConstant* constant = source.GetConstant();
1222 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1223 if (constant->IsFloatConstant()) {
1224 Load32BitValue(dest, static_cast<int32_t>(value));
1225 } else {
1226 Load64BitValue(dest, value);
1227 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001228 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001229 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001230 } else {
1231 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001232 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001233 }
1234 } else if (destination.IsStackSlot()) {
1235 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001236 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001237 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001238 } else if (source.IsFpuRegister()) {
1239 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001240 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001241 } else if (source.IsConstant()) {
1242 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001243 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001244 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001245 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001246 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001247 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1248 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001249 }
1250 } else {
1251 DCHECK(destination.IsDoubleStackSlot());
1252 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001253 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001254 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001255 } else if (source.IsFpuRegister()) {
1256 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001257 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001258 } else if (source.IsConstant()) {
1259 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001260 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001261 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001262 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001263 } else {
1264 DCHECK(constant->IsLongConstant());
1265 value = constant->AsLongConstant()->GetValue();
1266 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001267 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001268 } else {
1269 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001270 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1271 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001272 }
1273 }
1274}
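// Illustrative case (hypothetical operands): moving a double constant into a
// double stack slot takes the IsConstant() branch above, bit-casts the value
// to int64_t and calls Store64BitValueToStack(), which is expected to
// materialize the value (via TMP if it does not fit a 32-bit immediate) and
// store it with a single movq.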
1275
Calin Juravle175dc732015-08-25 15:42:32 +01001276void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1277 DCHECK(location.IsRegister());
1278 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1279}
1280
Calin Juravlee460d1d2015-09-29 04:52:17 +01001281void CodeGeneratorX86_64::MoveLocation(
1282 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1283 Move(dst, src);
1284}
1285
1286void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1287 if (location.IsRegister()) {
1288 locations->AddTemp(location);
1289 } else {
1290 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1291 }
1292}
1293
David Brazdilfc6a86a2015-06-26 10:33:45 +00001294void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001295 DCHECK(!successor->IsExitBlock());
1296
1297 HBasicBlock* block = got->GetBlock();
1298 HInstruction* previous = got->GetPrevious();
1299
1300 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001301 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001302 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1303 return;
1304 }
1305
1306 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1307 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1308 }
1309 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001310 __ jmp(codegen_->GetLabelOf(successor));
1311 }
1312}
1313
David Brazdilfc6a86a2015-06-26 10:33:45 +00001314void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1315 got->SetLocations(nullptr);
1316}
1317
1318void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1319 HandleGoto(got, got->GetSuccessor());
1320}
1321
1322void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1323 try_boundary->SetLocations(nullptr);
1324}
1325
1326void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1327 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1328 if (!successor->IsExitBlock()) {
1329 HandleGoto(try_boundary, successor);
1330 }
1331}
1332
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001333void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1334 exit->SetLocations(nullptr);
1335}
1336
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001337void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001338}
1339
Mark Mendell152408f2015-12-31 12:28:50 -05001340template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001341void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001342 LabelType* true_label,
1343 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001344 if (cond->IsFPConditionTrueIfNaN()) {
1345 __ j(kUnordered, true_label);
1346 } else if (cond->IsFPConditionFalseIfNaN()) {
1347 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001348 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001349 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001350}
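// Note: ucomiss/ucomisd report an unordered (NaN) comparison through PF, so
// the NaN outcome is dispatched first via kUnordered and only then is the
// ordered condition tested. Whether NaN goes to the true or the false label
// depends on the condition: e.g. "!=" is true for NaN while "==" is false.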
1351
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001352void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001353 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001354
Mark Mendellc4701932015-04-10 13:18:51 -04001355 Location left = locations->InAt(0);
1356 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001357 Primitive::Type type = condition->InputAt(0)->GetType();
1358 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001359 case Primitive::kPrimBoolean:
1360 case Primitive::kPrimByte:
1361 case Primitive::kPrimChar:
1362 case Primitive::kPrimShort:
1363 case Primitive::kPrimInt:
1364 case Primitive::kPrimNot: {
1365 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1366 if (right.IsConstant()) {
1367 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1368 if (value == 0) {
1369 __ testl(left_reg, left_reg);
1370 } else {
1371 __ cmpl(left_reg, Immediate(value));
1372 }
1373 } else if (right.IsStackSlot()) {
1374 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1375 } else {
1376 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1377 }
1378 break;
1379 }
Mark Mendellc4701932015-04-10 13:18:51 -04001380 case Primitive::kPrimLong: {
1381 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1382 if (right.IsConstant()) {
1383 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001384 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001385 } else if (right.IsDoubleStackSlot()) {
1386 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1387 } else {
1388 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1389 }
Mark Mendellc4701932015-04-10 13:18:51 -04001390 break;
1391 }
1392 case Primitive::kPrimFloat: {
1393 if (right.IsFpuRegister()) {
1394 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1395 } else if (right.IsConstant()) {
1396 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1397 codegen_->LiteralFloatAddress(
1398 right.GetConstant()->AsFloatConstant()->GetValue()));
1399 } else {
1400 DCHECK(right.IsStackSlot());
1401 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1402 Address(CpuRegister(RSP), right.GetStackIndex()));
1403 }
Mark Mendellc4701932015-04-10 13:18:51 -04001404 break;
1405 }
1406 case Primitive::kPrimDouble: {
1407 if (right.IsFpuRegister()) {
1408 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1409 } else if (right.IsConstant()) {
1410 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1411 codegen_->LiteralDoubleAddress(
1412 right.GetConstant()->AsDoubleConstant()->GetValue()));
1413 } else {
1414 DCHECK(right.IsDoubleStackSlot());
1415 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1416 Address(CpuRegister(RSP), right.GetStackIndex()));
1417 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001418 break;
1419 }
1420 default:
1421 LOG(FATAL) << "Unexpected condition type " << type;
1422 }
1423}
1424
1425template<class LabelType>
1426void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1427 LabelType* true_target_in,
1428 LabelType* false_target_in) {
1429 // Generated branching requires both targets to be explicit. If either of the
1431 1430 // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1431 LabelType fallthrough_target;
1432 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1433 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1434
1435 // Generate the comparison to set the CC.
1436 GenerateCompareTest(condition);
1437
1438 // Now generate the correct jump(s).
1439 Primitive::Type type = condition->InputAt(0)->GetType();
1440 switch (type) {
1441 case Primitive::kPrimLong: {
1442 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1443 break;
1444 }
1445 case Primitive::kPrimFloat: {
1446 GenerateFPJumps(condition, true_target, false_target);
1447 break;
1448 }
1449 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001450 GenerateFPJumps(condition, true_target, false_target);
1451 break;
1452 }
1453 default:
1454 LOG(FATAL) << "Unexpected condition type " << type;
1455 }
1456
David Brazdil0debae72015-11-12 18:37:00 +00001457 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001458 __ jmp(false_target);
1459 }
David Brazdil0debae72015-11-12 18:37:00 +00001460
1461 if (fallthrough_target.IsLinked()) {
1462 __ Bind(&fallthrough_target);
1463 }
Mark Mendellc4701932015-04-10 13:18:51 -04001464}
1465
David Brazdil0debae72015-11-12 18:37:00 +00001466static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1467 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1469 1468 // are set only strictly before `branch`. We also cannot reuse the eflags of a
1470 1469 // materialized FP condition, because of its complex (NaN-aware) branching.
1470 return cond->IsCondition() &&
1471 cond->GetNext() == branch &&
1472 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1473}
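// Example: for "if (a == b)" where the HEqual is materialized immediately
// before the HIf, the branch can reuse the flags left by the cmpl/setcc
// sequence; any intervening instruction (e.g. a move that clears a register
// with xorl) could clobber EFLAGS, hence the GetNext() == branch requirement.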
1474
Mark Mendell152408f2015-12-31 12:28:50 -05001475template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001476void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001477 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001478 LabelType* true_target,
1479 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001480 HInstruction* cond = instruction->InputAt(condition_input_index);
1481
1482 if (true_target == nullptr && false_target == nullptr) {
1483 // Nothing to do. The code always falls through.
1484 return;
1485 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001486 // Constant condition, statically compared against "true" (integer value 1).
1487 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001488 if (true_target != nullptr) {
1489 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001490 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001491 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001492 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001493 if (false_target != nullptr) {
1494 __ jmp(false_target);
1495 }
1496 }
1497 return;
1498 }
1499
1500 // The following code generates these patterns:
1501 // (1) true_target == nullptr && false_target != nullptr
1502 // - opposite condition true => branch to false_target
1503 // (2) true_target != nullptr && false_target == nullptr
1504 // - condition true => branch to true_target
1505 // (3) true_target != nullptr && false_target != nullptr
1506 // - condition true => branch to true_target
1507 // - branch to false_target
1508 if (IsBooleanValueOrMaterializedCondition(cond)) {
1509 if (AreEflagsSetFrom(cond, instruction)) {
1510 if (true_target == nullptr) {
1511 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1512 } else {
1513 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1514 }
1515 } else {
1516 // Materialized condition, compare against 0.
1517 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1518 if (lhs.IsRegister()) {
1519 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1520 } else {
1521 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1522 }
1523 if (true_target == nullptr) {
1524 __ j(kEqual, false_target);
1525 } else {
1526 __ j(kNotEqual, true_target);
1527 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001528 }
1529 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001530 // Condition has not been materialized, use its inputs as the
1531 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001532 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001533
David Brazdil0debae72015-11-12 18:37:00 +00001534 // If this is a long or FP comparison that has been folded into
1535 // the HCondition, generate the comparison directly.
1536 Primitive::Type type = condition->InputAt(0)->GetType();
1537 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1538 GenerateCompareTestAndBranch(condition, true_target, false_target);
1539 return;
1540 }
1541
1542 Location lhs = condition->GetLocations()->InAt(0);
1543 Location rhs = condition->GetLocations()->InAt(1);
1544 if (rhs.IsRegister()) {
1545 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1546 } else if (rhs.IsConstant()) {
1547 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001548 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001549 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001550 __ cmpl(lhs.AsRegister<CpuRegister>(),
1551 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1552 }
1553 if (true_target == nullptr) {
1554 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1555 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001556 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001557 }
Dave Allison20dfc792014-06-16 20:44:29 -07001558 }
David Brazdil0debae72015-11-12 18:37:00 +00001559
1560 // If neither branch falls through (case 3), the conditional branch to `true_target`
1561 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1562 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001563 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001564 }
1565}
1566
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001567void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001568 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1569 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001570 locations->SetInAt(0, Location::Any());
1571 }
1572}
1573
1574void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001575 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1576 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1577 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1578 nullptr : codegen_->GetLabelOf(true_successor);
1579 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1580 nullptr : codegen_->GetLabelOf(false_successor);
1581 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001582}
1583
1584void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1585 LocationSummary* locations = new (GetGraph()->GetArena())
1586 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001587 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001588 locations->SetInAt(0, Location::Any());
1589 }
1590}
1591
1592void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001593 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001594 GenerateTestAndBranch<Label>(deoptimize,
1595 /* condition_input_index */ 0,
1596 slow_path->GetEntryLabel(),
1597 /* false_target */ nullptr);
1598}
1599
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001600static bool SelectCanUseCMOV(HSelect* select) {
1601 // There are no conditional move instructions for XMMs.
1602 if (Primitive::IsFloatingPointType(select->GetType())) {
1603 return false;
1604 }
1605
1606 // A FP condition doesn't generate the single CC that we need.
1607 HInstruction* condition = select->GetCondition();
1608 if (condition->IsCondition() &&
1609 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1610 return false;
1611 }
1612
1613 // We can generate a CMOV for this Select.
1614 return true;
1615}
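// Illustrative lowering: an integral "x = cond ? a : b" can become
//   (out already holds b)  test/cmp ...;  cmovcc out, a
// avoiding a branch entirely, whereas FP selects keep the branch-based path
// below because there is no conditional move for XMM registers.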
1616
David Brazdil74eb1b22015-12-14 11:44:01 +00001617void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1618 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1619 if (Primitive::IsFloatingPointType(select->GetType())) {
1620 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001621 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001622 } else {
1623 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001624 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001625 if (select->InputAt(1)->IsConstant()) {
1626 locations->SetInAt(1, Location::RequiresRegister());
1627 } else {
1628 locations->SetInAt(1, Location::Any());
1629 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001630 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001631 locations->SetInAt(1, Location::Any());
1632 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001633 }
1634 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1635 locations->SetInAt(2, Location::RequiresRegister());
1636 }
1637 locations->SetOut(Location::SameAsFirstInput());
1638}
1639
1640void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1641 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001642 if (SelectCanUseCMOV(select)) {
1643 // If both the condition and the source types are integer, we can generate
1644 // a CMOV to implement Select.
1645 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001646 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001647 DCHECK(locations->InAt(0).Equals(locations->Out()));
1648
1649 HInstruction* select_condition = select->GetCondition();
1650 Condition cond = kNotEqual;
1651
1652 // Figure out how to test the 'condition'.
1653 if (select_condition->IsCondition()) {
1654 HCondition* condition = select_condition->AsCondition();
1655 if (!condition->IsEmittedAtUseSite()) {
1656 // This was a previously materialized condition.
1657 // Can we use the existing condition code?
1658 if (AreEflagsSetFrom(condition, select)) {
1659 // Materialization was the previous instruction. Condition codes are right.
1660 cond = X86_64IntegerCondition(condition->GetCondition());
1661 } else {
1662 // No, we have to recreate the condition code.
1663 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1664 __ testl(cond_reg, cond_reg);
1665 }
1666 } else {
1667 GenerateCompareTest(condition);
1668 cond = X86_64IntegerCondition(condition->GetCondition());
1669 }
1670 } else {
1671 // Must be a boolean condition, which needs to be compared to 0.
1672 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1673 __ testl(cond_reg, cond_reg);
1674 }
1675
1676 // If the condition is true, overwrite the output, which already contains false.
1677 // Generate the correct sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001678 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1679 if (value_true_loc.IsRegister()) {
1680 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1681 } else {
1682 __ cmov(cond,
1683 value_false,
1684 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1685 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001686 } else {
1687 NearLabel false_target;
1688 GenerateTestAndBranch<NearLabel>(select,
1689 /* condition_input_index */ 2,
1690 /* true_target */ nullptr,
1691 &false_target);
1692 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1693 __ Bind(&false_target);
1694 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001695}
1696
David Srbecky0cf44932015-12-09 14:09:59 +00001697void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1698 new (GetGraph()->GetArena()) LocationSummary(info);
1699}
1700
David Srbeckyd28f4a02016-03-14 17:14:24 +00001701void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1702 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001703}
1704
1705void CodeGeneratorX86_64::GenerateNop() {
1706 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001707}
1708
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001709void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001710 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001711 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001712 // Handle the long/FP comparisons made in instruction simplification.
1713 switch (cond->InputAt(0)->GetType()) {
1714 case Primitive::kPrimLong:
1715 locations->SetInAt(0, Location::RequiresRegister());
1716 locations->SetInAt(1, Location::Any());
1717 break;
1718 case Primitive::kPrimFloat:
1719 case Primitive::kPrimDouble:
1720 locations->SetInAt(0, Location::RequiresFpuRegister());
1721 locations->SetInAt(1, Location::Any());
1722 break;
1723 default:
1724 locations->SetInAt(0, Location::RequiresRegister());
1725 locations->SetInAt(1, Location::Any());
1726 break;
1727 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001728 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001729 locations->SetOut(Location::RequiresRegister());
1730 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001731}
1732
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001733void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001734 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001735 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001736 }
Mark Mendellc4701932015-04-10 13:18:51 -04001737
1738 LocationSummary* locations = cond->GetLocations();
1739 Location lhs = locations->InAt(0);
1740 Location rhs = locations->InAt(1);
1741 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001742 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001743
1744 switch (cond->InputAt(0)->GetType()) {
1745 default:
1746 // Integer case.
1747
1748 // Clear output register: setcc only sets the low byte.
1749 __ xorl(reg, reg);
1750
1751 if (rhs.IsRegister()) {
1752 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1753 } else if (rhs.IsConstant()) {
1754 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001755 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001756 } else {
1757 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1758 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001759 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001760 return;
1761 case Primitive::kPrimLong:
1762 // Clear output register: setcc only sets the low byte.
1763 __ xorl(reg, reg);
1764
1765 if (rhs.IsRegister()) {
1766 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1767 } else if (rhs.IsConstant()) {
1768 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001769 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001770 } else {
1771 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1772 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001773 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001774 return;
1775 case Primitive::kPrimFloat: {
1776 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1777 if (rhs.IsConstant()) {
1778 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1779 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1780 } else if (rhs.IsStackSlot()) {
1781 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1782 } else {
1783 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1784 }
1785 GenerateFPJumps(cond, &true_label, &false_label);
1786 break;
1787 }
1788 case Primitive::kPrimDouble: {
1789 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1790 if (rhs.IsConstant()) {
1791 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1792 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1793 } else if (rhs.IsDoubleStackSlot()) {
1794 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1795 } else {
1796 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1797 }
1798 GenerateFPJumps(cond, &true_label, &false_label);
1799 break;
1800 }
1801 }
1802
1803 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001804 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001805
Roland Levillain4fa13f62015-07-06 18:11:54 +01001806 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001807 __ Bind(&false_label);
1808 __ xorl(reg, reg);
1809 __ jmp(&done_label);
1810
Roland Levillain4fa13f62015-07-06 18:11:54 +01001811 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001812 __ Bind(&true_label);
1813 __ movl(reg, Immediate(1));
1814 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001815}
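// Illustrative integer case: materializing "a > b" yields roughly
//   xorl reg, reg;  cmpl a, b;  setg reg
// The xorl must come first because it clobbers EFLAGS, and it is needed at all
// because setcc only writes the low byte of the destination register.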
1816
1817void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001818 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001819}
1820
1821void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001822 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001823}
1824
1825void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001826 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001827}
1828
1829void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001830 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001831}
1832
1833void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001834 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001835}
1836
1837void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001838 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001839}
1840
1841void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001842 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001843}
1844
1845void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001846 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001847}
1848
1849void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001850 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001851}
1852
1853void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001854 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001855}
1856
1857void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001858 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001859}
1860
1861void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001862 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001863}
1864
Aart Bike9f37602015-10-09 11:15:55 -07001865void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001866 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001867}
1868
1869void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001870 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001871}
1872
1873void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001874 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001875}
1876
1877void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001878 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001879}
1880
1881void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001882 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001883}
1884
1885void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001886 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001887}
1888
1889void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001890 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001891}
1892
1893void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001894 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001895}
1896
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001897void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001898 LocationSummary* locations =
1899 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001900 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001901 case Primitive::kPrimBoolean:
1902 case Primitive::kPrimByte:
1903 case Primitive::kPrimShort:
1904 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001905 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001906 case Primitive::kPrimLong: {
1907 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001908 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001909 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1910 break;
1911 }
1912 case Primitive::kPrimFloat:
1913 case Primitive::kPrimDouble: {
1914 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001915 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001916 locations->SetOut(Location::RequiresRegister());
1917 break;
1918 }
1919 default:
1920 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1921 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001922}
1923
1924void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001925 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001926 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001927 Location left = locations->InAt(0);
1928 Location right = locations->InAt(1);
1929
Mark Mendell0c9497d2015-08-21 09:30:05 -04001930 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001931 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001932 Condition less_cond = kLess;
1933
Calin Juravleddb7df22014-11-25 20:56:51 +00001934 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001935 case Primitive::kPrimBoolean:
1936 case Primitive::kPrimByte:
1937 case Primitive::kPrimShort:
1938 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001939 case Primitive::kPrimInt: {
1940 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1941 if (right.IsConstant()) {
1942 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1943 codegen_->Compare32BitValue(left_reg, value);
1944 } else if (right.IsStackSlot()) {
1945 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1946 } else {
1947 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1948 }
1949 break;
1950 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001951 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001952 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1953 if (right.IsConstant()) {
1954 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001955 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001956 } else if (right.IsDoubleStackSlot()) {
1957 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001958 } else {
1959 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1960 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001961 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001962 }
1963 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001964 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1965 if (right.IsConstant()) {
1966 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1967 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1968 } else if (right.IsStackSlot()) {
1969 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1970 } else {
1971 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1972 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001973 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001974 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001975 break;
1976 }
1977 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001978 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1979 if (right.IsConstant()) {
1980 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1981 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1982 } else if (right.IsDoubleStackSlot()) {
1983 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1984 } else {
1985 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1986 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001987 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001988 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001989 break;
1990 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001991 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001992 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001993 }
Aart Bika19616e2016-02-01 18:57:58 -08001994
Calin Juravleddb7df22014-11-25 20:56:51 +00001995 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001996 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001997 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001998
Calin Juravle91debbc2014-11-26 19:01:09 +00001999 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00002000 __ movl(out, Immediate(1));
2001 __ jmp(&done);
2002
2003 __ Bind(&less);
2004 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002005
2006 __ Bind(&done);
2007}
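// Example: for a long compare the code above produces 0 when the operands are
// equal (jump to done), 1 when left > right (fall through to "greater") and -1
// when left < right (jump to "less"). For float/double, the gt-bias of the
// HCompare decides whether an unordered (NaN) comparison is routed to
// "greater" (+1) or to "less" (-1).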
2008
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002009void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002010 LocationSummary* locations =
2011 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002012 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002013}
2014
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002015void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002016 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002017}
2018
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002019void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
2020 LocationSummary* locations =
2021 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2022 locations->SetOut(Location::ConstantLocation(constant));
2023}
2024
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002025void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002026 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002027}
2028
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002029void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002030 LocationSummary* locations =
2031 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002032 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002033}
2034
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002035void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002036 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002037}
2038
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002039void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2040 LocationSummary* locations =
2041 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2042 locations->SetOut(Location::ConstantLocation(constant));
2043}
2044
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002045void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002046 // Will be generated at use site.
2047}
2048
2049void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2050 LocationSummary* locations =
2051 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2052 locations->SetOut(Location::ConstantLocation(constant));
2053}
2054
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002055void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
2056 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002057 // Will be generated at use site.
2058}
2059
Calin Juravle27df7582015-04-17 19:12:31 +01002060void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2061 memory_barrier->SetLocations(nullptr);
2062}
2063
2064void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002065 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002066}
2067
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002068void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2069 ret->SetLocations(nullptr);
2070}
2071
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002072void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002073 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002074}
2075
2076void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002077 LocationSummary* locations =
2078 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002079 switch (ret->InputAt(0)->GetType()) {
2080 case Primitive::kPrimBoolean:
2081 case Primitive::kPrimByte:
2082 case Primitive::kPrimChar:
2083 case Primitive::kPrimShort:
2084 case Primitive::kPrimInt:
2085 case Primitive::kPrimNot:
2086 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002087 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002088 break;
2089
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002090 case Primitive::kPrimFloat:
2091 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002092 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002093 break;
2094
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002095 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002096 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002097 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002098}
2099
2100void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2101 if (kIsDebugBuild) {
2102 switch (ret->InputAt(0)->GetType()) {
2103 case Primitive::kPrimBoolean:
2104 case Primitive::kPrimByte:
2105 case Primitive::kPrimChar:
2106 case Primitive::kPrimShort:
2107 case Primitive::kPrimInt:
2108 case Primitive::kPrimNot:
2109 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002110 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002111 break;
2112
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002113 case Primitive::kPrimFloat:
2114 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002115 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002116 XMM0);
2117 break;
2118
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002119 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002120 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002121 }
2122 }
2123 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002124}
2125
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002126Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2127 switch (type) {
2128 case Primitive::kPrimBoolean:
2129 case Primitive::kPrimByte:
2130 case Primitive::kPrimChar:
2131 case Primitive::kPrimShort:
2132 case Primitive::kPrimInt:
2133 case Primitive::kPrimNot:
2134 case Primitive::kPrimLong:
2135 return Location::RegisterLocation(RAX);
2136
2137 case Primitive::kPrimVoid:
2138 return Location::NoLocation();
2139
2140 case Primitive::kPrimDouble:
2141 case Primitive::kPrimFloat:
2142 return Location::FpuRegisterLocation(XMM0);
2143 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002144
2145 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002146}
2147
2148Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2149 return Location::RegisterLocation(kMethodRegisterArgument);
2150}
2151
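// Rough summary of the managed calling convention handled below (the register
// lists themselves are defined by InvokeDexCallingConvention, which is
// authoritative): RDI carries the ArtMethod*, integer/reference arguments go in
// RSI, RDX, RCX, R8, R9, floating-point arguments in XMM0-XMM7, and anything left
// over is passed on the stack. gp_index_ and float_index_ count the registers
// already handed out, while stack_index_ tracks the slot to use once registers
// run out. For example, assuming those register lists:
//   void m(int a, long b, float c)  ->  a: RSI (32-bit), b: RDX, c: XMM0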
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002152Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002153 switch (type) {
2154 case Primitive::kPrimBoolean:
2155 case Primitive::kPrimByte:
2156 case Primitive::kPrimChar:
2157 case Primitive::kPrimShort:
2158 case Primitive::kPrimInt:
2159 case Primitive::kPrimNot: {
2160 uint32_t index = gp_index_++;
2161 stack_index_++;
2162 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002163 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002164 } else {
2165 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2166 }
2167 }
2168
2169 case Primitive::kPrimLong: {
2170 uint32_t index = gp_index_;
2171 stack_index_ += 2;
2172 if (index < calling_convention.GetNumberOfRegisters()) {
2173 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002174 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002175 } else {
2176 gp_index_ += 2;
2177 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2178 }
2179 }
2180
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002181 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002182 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002183 stack_index_++;
2184 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002185 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002186 } else {
2187 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2188 }
2189 }
2190
2191 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002192 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002193 stack_index_ += 2;
2194 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002195 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002196 } else {
2197 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2198 }
2199 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002200
2201 case Primitive::kPrimVoid:
2202 LOG(FATAL) << "Unexpected parameter type " << type;
2203 break;
2204 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002205 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002206}
2207
Calin Juravle175dc732015-08-25 15:42:32 +01002208void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2209 // The trampoline uses the same calling convention as dex calling conventions,
2210 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
2211 // the method_idx.
2212 HandleInvoke(invoke);
2213}
2214
2215void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2216 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2217}
2218
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002219void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002220 // Explicit clinit checks triggered by static invokes must have been pruned by
2221 // art::PrepareForRegisterAllocation.
2222 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002223
Mark Mendellfb8d2792015-03-31 22:16:59 -04002224 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002225 if (intrinsic.TryDispatch(invoke)) {
2226 return;
2227 }
2228
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002229 HandleInvoke(invoke);
2230}
2231
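// Note: if the locations builder recognized this call as an intrinsic
// (IntrinsicLocationsBuilderX86_64::TryDispatch succeeded and marked the
// LocationSummary as intrinsified), the helper below lets
// IntrinsicCodeGeneratorX86_64 emit the body inline and no regular call is
// generated. The per-intrinsic logic lives in intrinsics_x86_64.cc.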
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002232static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2233 if (invoke->GetLocations()->Intrinsified()) {
2234 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2235 intrinsic.Dispatch(invoke);
2236 return true;
2237 }
2238 return false;
2239}
2240
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002241void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002242 // Explicit clinit checks triggered by static invokes must have been pruned by
2243 // art::PrepareForRegisterAllocation.
2244 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002245
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002246 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2247 return;
2248 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002249
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002250 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002251 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002252 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002253 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002254}
2255
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002256void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002257 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002258 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002259}
2260
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002261void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002262 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002263 if (intrinsic.TryDispatch(invoke)) {
2264 return;
2265 }
2266
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002267 HandleInvoke(invoke);
2268}
2269
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002270void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002271 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2272 return;
2273 }
2274
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002275 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002276 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002277 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002278}
2279
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002280void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2281 HandleInvoke(invoke);
2282 // Add the hidden argument.
2283 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2284}
2285
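// Sketch of the interface dispatch emitted below: load the receiver's class,
// fetch its IMT pointer, index it with GetImtIndex() % ImTable::kSize, and call
// the entry point of the ArtMethod found there. RAX carries the interface
// method's dex method index as a hidden argument so that, if the IMT slot holds
// a conflict resolution stub, the runtime can still find the intended method.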
2286void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2287 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002288 LocationSummary* locations = invoke->GetLocations();
2289 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2290 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002291 Location receiver = locations->InAt(0);
2292 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2293
Roland Levillain0d5a2812015-11-13 10:07:31 +00002294 // Set the hidden argument. It is safe to do this here, as RAX
2295 // won't be modified thereafter, before the `call` instruction.
2296 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002297 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002298
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002299 if (receiver.IsStackSlot()) {
2300 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002301 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002302 __ movl(temp, Address(temp, class_offset));
2303 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002304 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002305 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002306 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002307 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002308 // Instead of simply (possibly) unpoisoning `temp` here, we should
2309 // emit a read barrier for the previous class reference load.
2310 // However, this is not required in practice, as this is an
2311 // intermediate/temporary reference and because the current
2312 // concurrent copying collector keeps the from-space memory
2313 // intact/accessible until the end of the marking phase (future
2314 // collectors may not keep it accessible that long).
Roland Levillain4d027112015-07-01 15:41:14 +01002315 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002316 // temp = temp->GetAddressOfIMT()
2317 __ movq(temp,
2318 Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
2319 // method_offset = offset of the IMT entry for this interface method.
2320 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
2321 invoke->GetImtIndex() % ImTable::kSize, kX86_64PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002322 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002323 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002324 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002325 __ call(Address(temp,
2326 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002327
2328 DCHECK(!codegen_->IsLeafMethod());
2329 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2330}
2331
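// Note on the floating-point cases below: x86-64 has no FP negate instruction,
// so negation is done by flipping the sign bit with an XOR against a mask
// (0x80000000 for float, 0x8000000000000000 for double) loaded into a temporary
// XMM register, roughly:
//   movss xmm_tmp, [sign_mask]   // or movsd for double
//   xorps xmm_out, xmm_tmp       // or xorpd for double
// Integer negation simply uses negl/negq on the in-place output.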
Roland Levillain88cb1752014-10-20 16:36:47 +01002332void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2333 LocationSummary* locations =
2334 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2335 switch (neg->GetResultType()) {
2336 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002337 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002338 locations->SetInAt(0, Location::RequiresRegister());
2339 locations->SetOut(Location::SameAsFirstInput());
2340 break;
2341
Roland Levillain88cb1752014-10-20 16:36:47 +01002342 case Primitive::kPrimFloat:
2343 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002344 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002345 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002346 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002347 break;
2348
2349 default:
2350 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2351 }
2352}
2353
2354void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2355 LocationSummary* locations = neg->GetLocations();
2356 Location out = locations->Out();
2357 Location in = locations->InAt(0);
2358 switch (neg->GetResultType()) {
2359 case Primitive::kPrimInt:
2360 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002361 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002362 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002363 break;
2364
2365 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002366 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002367 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002368 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002369 break;
2370
Roland Levillain5368c212014-11-27 15:03:41 +00002371 case Primitive::kPrimFloat: {
2372 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002373 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002374 // Implement float negation with an exclusive or with value
2375 // 0x80000000 (mask for bit 31, representing the sign of a
2376 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002377 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002378 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002379 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002380 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002381
Roland Levillain5368c212014-11-27 15:03:41 +00002382 case Primitive::kPrimDouble: {
2383 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002384 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002385 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002386 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002387 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002388 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002389 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002390 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002391 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002392
2393 default:
2394 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2395 }
2396}
2397
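// Note on the location choices below: most integer-to-integer conversions accept
// Location::Any() because movsx/movzx/movl can read a register or a stack slot
// (int-to-long still requires a register for movsxd, see the TODO below);
// float/double-to-int/long conversions need the input in an XMM register and the
// output in a CPU register for the comiss/cvtt sequences; and int/long-to-FP
// conversions accept Any() since constants and stack slots are handled explicitly
// in the code generator.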
Roland Levillaindff1f282014-11-05 14:15:05 +00002398void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2399 LocationSummary* locations =
2400 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2401 Primitive::Type result_type = conversion->GetResultType();
2402 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002403 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002404
David Brazdilb2bd1c52015-03-25 11:17:37 +00002405 // The Java language does not allow treating boolean as an integral type but
2406 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002407
Roland Levillaindff1f282014-11-05 14:15:05 +00002408 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002409 case Primitive::kPrimByte:
2410 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002411 case Primitive::kPrimLong:
2412 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002413 case Primitive::kPrimBoolean:
2414 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002415 case Primitive::kPrimShort:
2416 case Primitive::kPrimInt:
2417 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002418 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002419 locations->SetInAt(0, Location::Any());
2420 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2421 break;
2422
2423 default:
2424 LOG(FATAL) << "Unexpected type conversion from " << input_type
2425 << " to " << result_type;
2426 }
2427 break;
2428
Roland Levillain01a8d712014-11-14 16:27:39 +00002429 case Primitive::kPrimShort:
2430 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002431 case Primitive::kPrimLong:
2432 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002433 case Primitive::kPrimBoolean:
2434 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002435 case Primitive::kPrimByte:
2436 case Primitive::kPrimInt:
2437 case Primitive::kPrimChar:
2438 // Processing a Dex `int-to-short' instruction.
2439 locations->SetInAt(0, Location::Any());
2440 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2441 break;
2442
2443 default:
2444 LOG(FATAL) << "Unexpected type conversion from " << input_type
2445 << " to " << result_type;
2446 }
2447 break;
2448
Roland Levillain946e1432014-11-11 17:35:19 +00002449 case Primitive::kPrimInt:
2450 switch (input_type) {
2451 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002452 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002453 locations->SetInAt(0, Location::Any());
2454 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2455 break;
2456
2457 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002458 // Processing a Dex `float-to-int' instruction.
2459 locations->SetInAt(0, Location::RequiresFpuRegister());
2460 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002461 break;
2462
Roland Levillain946e1432014-11-11 17:35:19 +00002463 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002464 // Processing a Dex `double-to-int' instruction.
2465 locations->SetInAt(0, Location::RequiresFpuRegister());
2466 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002467 break;
2468
2469 default:
2470 LOG(FATAL) << "Unexpected type conversion from " << input_type
2471 << " to " << result_type;
2472 }
2473 break;
2474
Roland Levillaindff1f282014-11-05 14:15:05 +00002475 case Primitive::kPrimLong:
2476 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002477 case Primitive::kPrimBoolean:
2478 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002479 case Primitive::kPrimByte:
2480 case Primitive::kPrimShort:
2481 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002482 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002483 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002484 // TODO: We would benefit from a (to-be-implemented)
2485 // Location::RegisterOrStackSlot requirement for this input.
2486 locations->SetInAt(0, Location::RequiresRegister());
2487 locations->SetOut(Location::RequiresRegister());
2488 break;
2489
2490 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002491 // Processing a Dex `float-to-long' instruction.
2492 locations->SetInAt(0, Location::RequiresFpuRegister());
2493 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002494 break;
2495
Roland Levillaindff1f282014-11-05 14:15:05 +00002496 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002497 // Processing a Dex `double-to-long' instruction.
2498 locations->SetInAt(0, Location::RequiresFpuRegister());
2499 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002500 break;
2501
2502 default:
2503 LOG(FATAL) << "Unexpected type conversion from " << input_type
2504 << " to " << result_type;
2505 }
2506 break;
2507
Roland Levillain981e4542014-11-14 11:47:14 +00002508 case Primitive::kPrimChar:
2509 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002510 case Primitive::kPrimLong:
2511 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002512 case Primitive::kPrimBoolean:
2513 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002514 case Primitive::kPrimByte:
2515 case Primitive::kPrimShort:
2516 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002517 // Processing a Dex `int-to-char' instruction.
2518 locations->SetInAt(0, Location::Any());
2519 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2520 break;
2521
2522 default:
2523 LOG(FATAL) << "Unexpected type conversion from " << input_type
2524 << " to " << result_type;
2525 }
2526 break;
2527
Roland Levillaindff1f282014-11-05 14:15:05 +00002528 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002529 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002530 case Primitive::kPrimBoolean:
2531 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002532 case Primitive::kPrimByte:
2533 case Primitive::kPrimShort:
2534 case Primitive::kPrimInt:
2535 case Primitive::kPrimChar:
2536 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002537 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002538 locations->SetOut(Location::RequiresFpuRegister());
2539 break;
2540
2541 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002542 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002543 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002544 locations->SetOut(Location::RequiresFpuRegister());
2545 break;
2546
Roland Levillaincff13742014-11-17 14:32:17 +00002547 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002548 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002549 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002550 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002551 break;
2552
2553 default:
2554 LOG(FATAL) << "Unexpected type conversion from " << input_type
2555 << " to " << result_type;
2556 }
2557 break;
2558
Roland Levillaindff1f282014-11-05 14:15:05 +00002559 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002560 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002561 case Primitive::kPrimBoolean:
2562 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002563 case Primitive::kPrimByte:
2564 case Primitive::kPrimShort:
2565 case Primitive::kPrimInt:
2566 case Primitive::kPrimChar:
2567 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002568 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002569 locations->SetOut(Location::RequiresFpuRegister());
2570 break;
2571
2572 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002573 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002574 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002575 locations->SetOut(Location::RequiresFpuRegister());
2576 break;
2577
Roland Levillaincff13742014-11-17 14:32:17 +00002578 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002579 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002580 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002581 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002582 break;
2583
2584 default:
2585 LOG(FATAL) << "Unexpected type conversion from " << input_type
2586 << " to " << result_type;
2587 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002588 break;
2589
2590 default:
2591 LOG(FATAL) << "Unexpected type conversion from " << input_type
2592 << " to " << result_type;
2593 }
2594}
2595
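// Note on the float/double-to-int/long sequences below: cvttss2si/cvttsd2si
// return the "integer indefinite" value (the minimum integer) for NaN and
// out-of-range inputs, while Java requires saturating large positive inputs to
// the maximum value and converting NaN to 0. The generated pattern is therefore
// roughly:
//   out = MAX;
//   if (in >= (fp)MAX) goto done;          // saturate positive overflow
//   if (in != in) { out = 0; goto done; }  // NaN
//   out = truncate(in);
//   done: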
2596void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2597 LocationSummary* locations = conversion->GetLocations();
2598 Location out = locations->Out();
2599 Location in = locations->InAt(0);
2600 Primitive::Type result_type = conversion->GetResultType();
2601 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002602 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002603 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002604 case Primitive::kPrimByte:
2605 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002606 case Primitive::kPrimLong:
2607 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002608 case Primitive::kPrimBoolean:
2609 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002610 case Primitive::kPrimShort:
2611 case Primitive::kPrimInt:
2612 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002613 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002614 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002615 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002616 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002617 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002618 Address(CpuRegister(RSP), in.GetStackIndex()));
2619 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002620 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002621 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002622 }
2623 break;
2624
2625 default:
2626 LOG(FATAL) << "Unexpected type conversion from " << input_type
2627 << " to " << result_type;
2628 }
2629 break;
2630
Roland Levillain01a8d712014-11-14 16:27:39 +00002631 case Primitive::kPrimShort:
2632 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002633 case Primitive::kPrimLong:
2634 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002635 case Primitive::kPrimBoolean:
2636 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002637 case Primitive::kPrimByte:
2638 case Primitive::kPrimInt:
2639 case Primitive::kPrimChar:
2640 // Processing a Dex `int-to-short' instruction.
2641 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002642 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002643 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002644 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002645 Address(CpuRegister(RSP), in.GetStackIndex()));
2646 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002647 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002648 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002649 }
2650 break;
2651
2652 default:
2653 LOG(FATAL) << "Unexpected type conversion from " << input_type
2654 << " to " << result_type;
2655 }
2656 break;
2657
Roland Levillain946e1432014-11-11 17:35:19 +00002658 case Primitive::kPrimInt:
2659 switch (input_type) {
2660 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002661 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002662 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002663 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002664 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002665 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002666 Address(CpuRegister(RSP), in.GetStackIndex()));
2667 } else {
2668 DCHECK(in.IsConstant());
2669 DCHECK(in.GetConstant()->IsLongConstant());
2670 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002671 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002672 }
2673 break;
2674
Roland Levillain3f8f9362014-12-02 17:45:01 +00002675 case Primitive::kPrimFloat: {
2676 // Processing a Dex `float-to-int' instruction.
2677 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2678 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002679 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002680
2681 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002682 // if input >= (float)INT_MAX goto done
2683 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002684 __ j(kAboveEqual, &done);
2685 // if input == NaN goto nan
2686 __ j(kUnordered, &nan);
2687 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002688 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002689 __ jmp(&done);
2690 __ Bind(&nan);
2691 // output = 0
2692 __ xorl(output, output);
2693 __ Bind(&done);
2694 break;
2695 }
2696
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002697 case Primitive::kPrimDouble: {
2698 // Processing a Dex `double-to-int' instruction.
2699 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2700 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002701 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002702
2703 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002704 // if input >= (double)INT_MAX goto done
2705 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002706 __ j(kAboveEqual, &done);
2707 // if input == NaN goto nan
2708 __ j(kUnordered, &nan);
2709 // output = double-to-int-truncate(input)
2710 __ cvttsd2si(output, input);
2711 __ jmp(&done);
2712 __ Bind(&nan);
2713 // output = 0
2714 __ xorl(output, output);
2715 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002716 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002717 }
Roland Levillain946e1432014-11-11 17:35:19 +00002718
2719 default:
2720 LOG(FATAL) << "Unexpected type conversion from " << input_type
2721 << " to " << result_type;
2722 }
2723 break;
2724
Roland Levillaindff1f282014-11-05 14:15:05 +00002725 case Primitive::kPrimLong:
2726 DCHECK(out.IsRegister());
2727 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002728 case Primitive::kPrimBoolean:
2729 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002730 case Primitive::kPrimByte:
2731 case Primitive::kPrimShort:
2732 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002733 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002734 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002735 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002736 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002737 break;
2738
Roland Levillain624279f2014-12-04 11:54:28 +00002739 case Primitive::kPrimFloat: {
2740 // Processing a Dex `float-to-long' instruction.
2741 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2742 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002743 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002744
Mark Mendell92e83bf2015-05-07 11:25:03 -04002745 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002746 // if input >= (float)LONG_MAX goto done
2747 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002748 __ j(kAboveEqual, &done);
2749 // if input == NaN goto nan
2750 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002751 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002752 __ cvttss2si(output, input, true);
2753 __ jmp(&done);
2754 __ Bind(&nan);
2755 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002756 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002757 __ Bind(&done);
2758 break;
2759 }
2760
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002761 case Primitive::kPrimDouble: {
2762 // Processing a Dex `double-to-long' instruction.
2763 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2764 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002765 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002766
Mark Mendell92e83bf2015-05-07 11:25:03 -04002767 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002768 // if input >= (double)LONG_MAX goto done
2769 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002770 __ j(kAboveEqual, &done);
2771 // if input == NaN goto nan
2772 __ j(kUnordered, &nan);
2773 // output = double-to-long-truncate(input)
2774 __ cvttsd2si(output, input, true);
2775 __ jmp(&done);
2776 __ Bind(&nan);
2777 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002778 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002779 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002780 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002781 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002782
2783 default:
2784 LOG(FATAL) << "Unexpected type conversion from " << input_type
2785 << " to " << result_type;
2786 }
2787 break;
2788
Roland Levillain981e4542014-11-14 11:47:14 +00002789 case Primitive::kPrimChar:
2790 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002791 case Primitive::kPrimLong:
2792 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002793 case Primitive::kPrimBoolean:
2794 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002795 case Primitive::kPrimByte:
2796 case Primitive::kPrimShort:
2797 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002798 // Processing a Dex `int-to-char' instruction.
2799 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002800 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002801 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002802 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002803 Address(CpuRegister(RSP), in.GetStackIndex()));
2804 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002805 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002806 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002807 }
2808 break;
2809
2810 default:
2811 LOG(FATAL) << "Unexpected type conversion from " << input_type
2812 << " to " << result_type;
2813 }
2814 break;
2815
Roland Levillaindff1f282014-11-05 14:15:05 +00002816 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002817 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002818 case Primitive::kPrimBoolean:
2819 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002820 case Primitive::kPrimByte:
2821 case Primitive::kPrimShort:
2822 case Primitive::kPrimInt:
2823 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002824 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002825 if (in.IsRegister()) {
2826 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2827 } else if (in.IsConstant()) {
2828 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2829 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002830 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002831 } else {
2832 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2833 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2834 }
Roland Levillaincff13742014-11-17 14:32:17 +00002835 break;
2836
2837 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002838 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002839 if (in.IsRegister()) {
2840 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2841 } else if (in.IsConstant()) {
2842 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2843 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002844 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002845 } else {
2846 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2847 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2848 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002849 break;
2850
Roland Levillaincff13742014-11-17 14:32:17 +00002851 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002852 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002853 if (in.IsFpuRegister()) {
2854 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2855 } else if (in.IsConstant()) {
2856 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2857 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002858 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002859 } else {
2860 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2861 Address(CpuRegister(RSP), in.GetStackIndex()));
2862 }
Roland Levillaincff13742014-11-17 14:32:17 +00002863 break;
2864
2865 default:
2866 LOG(FATAL) << "Unexpected type conversion from " << input_type
2867 << " to " << result_type;
2868 }
2869 break;
2870
Roland Levillaindff1f282014-11-05 14:15:05 +00002871 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002872 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002873 case Primitive::kPrimBoolean:
2874 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002875 case Primitive::kPrimByte:
2876 case Primitive::kPrimShort:
2877 case Primitive::kPrimInt:
2878 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002879 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002880 if (in.IsRegister()) {
2881 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2882 } else if (in.IsConstant()) {
2883 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2884 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002885 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002886 } else {
2887 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2888 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2889 }
Roland Levillaincff13742014-11-17 14:32:17 +00002890 break;
2891
2892 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002893 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002894 if (in.IsRegister()) {
2895 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2896 } else if (in.IsConstant()) {
2897 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2898 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002899 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002900 } else {
2901 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2902 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2903 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002904 break;
2905
Roland Levillaincff13742014-11-17 14:32:17 +00002906 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002907 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002908 if (in.IsFpuRegister()) {
2909 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2910 } else if (in.IsConstant()) {
2911 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2912 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002913 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002914 } else {
2915 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2916 Address(CpuRegister(RSP), in.GetStackIndex()));
2917 }
Roland Levillaincff13742014-11-17 14:32:17 +00002918 break;
2919
2920 default:
2921 LOG(FATAL) << "Unexpected type conversion from " << input_type
2922 << " to " << result_type;
2923 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002924 break;
2925
2926 default:
2927 LOG(FATAL) << "Unexpected type conversion from " << input_type
2928 << " to " << result_type;
2929 }
2930}
2931
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002932void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002933 LocationSummary* locations =
2934 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002935 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002936 case Primitive::kPrimInt: {
2937 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002938 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2939 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002940 break;
2941 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002942
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002943 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002944 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002945 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002946 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002947 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002948 break;
2949 }
2950
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002951 case Primitive::kPrimDouble:
2952 case Primitive::kPrimFloat: {
2953 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002954 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002955 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002956 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002957 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002958
2959 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002960 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002961 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002962}
2963
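// Note on the integer cases below: when the output register differs from both
// inputs, leal/leaq is used as a non-destructive three-operand add
// (out = first + second, or out = first + constant) instead of a mov followed by
// addl/addq; when the output aliases one of the inputs, a plain add is emitted.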
2964void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2965 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002966 Location first = locations->InAt(0);
2967 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002968 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002969
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002970 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002971 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002972 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002973 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2974 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002975 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2976 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002977 } else {
2978 __ leal(out.AsRegister<CpuRegister>(), Address(
2979 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2980 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002981 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002982 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2983 __ addl(out.AsRegister<CpuRegister>(),
2984 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2985 } else {
2986 __ leal(out.AsRegister<CpuRegister>(), Address(
2987 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2988 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002989 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002990 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002991 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002992 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002993 break;
2994 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002995
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002996 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002997 if (second.IsRegister()) {
2998 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2999 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003000 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3001 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05003002 } else {
3003 __ leaq(out.AsRegister<CpuRegister>(), Address(
3004 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
3005 }
3006 } else {
3007 DCHECK(second.IsConstant());
3008 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3009 int32_t int32_value = Low32Bits(value);
3010 DCHECK_EQ(int32_value, value);
3011 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3012 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
3013 } else {
3014 __ leaq(out.AsRegister<CpuRegister>(), Address(
3015 first.AsRegister<CpuRegister>(), int32_value));
3016 }
3017 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003018 break;
3019 }
3020
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003021 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003022 if (second.IsFpuRegister()) {
3023 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3024 } else if (second.IsConstant()) {
3025 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003026 codegen_->LiteralFloatAddress(
3027 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003028 } else {
3029 DCHECK(second.IsStackSlot());
3030 __ addss(first.AsFpuRegister<XmmRegister>(),
3031 Address(CpuRegister(RSP), second.GetStackIndex()));
3032 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003033 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003034 }
3035
3036 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003037 if (second.IsFpuRegister()) {
3038 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3039 } else if (second.IsConstant()) {
3040 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003041 codegen_->LiteralDoubleAddress(
3042 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003043 } else {
3044 DCHECK(second.IsDoubleStackSlot());
3045 __ addsd(first.AsFpuRegister<XmmRegister>(),
3046 Address(CpuRegister(RSP), second.GetStackIndex()));
3047 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003048 break;
3049 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003050
3051 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003052 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003053 }
3054}
3055
3056void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003057 LocationSummary* locations =
3058 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003059 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003060 case Primitive::kPrimInt: {
3061 locations->SetInAt(0, Location::RequiresRegister());
3062 locations->SetInAt(1, Location::Any());
3063 locations->SetOut(Location::SameAsFirstInput());
3064 break;
3065 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003066 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003067 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003068 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003069 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003070 break;
3071 }
Calin Juravle11351682014-10-23 15:38:15 +01003072 case Primitive::kPrimFloat:
3073 case Primitive::kPrimDouble: {
3074 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003075 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003076 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003077 break;
Calin Juravle11351682014-10-23 15:38:15 +01003078 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003079 default:
Calin Juravle11351682014-10-23 15:38:15 +01003080 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003081 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003082}
3083
3084void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3085 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003086 Location first = locations->InAt(0);
3087 Location second = locations->InAt(1);
3088 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003089 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003090 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003091 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003092 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003093 } else if (second.IsConstant()) {
3094 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003095 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003096 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003097 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003098 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003099 break;
3100 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003101 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003102 if (second.IsConstant()) {
3103 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3104 DCHECK(IsInt<32>(value));
3105 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3106 } else {
3107 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3108 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003109 break;
3110 }
3111
Calin Juravle11351682014-10-23 15:38:15 +01003112 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003113 if (second.IsFpuRegister()) {
3114 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3115 } else if (second.IsConstant()) {
3116 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003117 codegen_->LiteralFloatAddress(
3118 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003119 } else {
3120 DCHECK(second.IsStackSlot());
3121 __ subss(first.AsFpuRegister<XmmRegister>(),
3122 Address(CpuRegister(RSP), second.GetStackIndex()));
3123 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003124 break;
Calin Juravle11351682014-10-23 15:38:15 +01003125 }
3126
3127 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003128 if (second.IsFpuRegister()) {
3129 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3130 } else if (second.IsConstant()) {
3131 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003132 codegen_->LiteralDoubleAddress(
3133 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003134 } else {
3135 DCHECK(second.IsDoubleStackSlot());
3136 __ subsd(first.AsFpuRegister<XmmRegister>(),
3137 Address(CpuRegister(RSP), second.GetStackIndex()));
3138 }
Calin Juravle11351682014-10-23 15:38:15 +01003139 break;
3140 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003141
3142 default:
Calin Juravle11351682014-10-23 15:38:15 +01003143 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003144 }
3145}
3146
Calin Juravle34bacdf2014-10-07 20:23:36 +01003147void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3148 LocationSummary* locations =
3149 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3150 switch (mul->GetResultType()) {
3151 case Primitive::kPrimInt: {
3152 locations->SetInAt(0, Location::RequiresRegister());
3153 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003154 if (mul->InputAt(1)->IsIntConstant()) {
3155 // Can use 3 operand multiply.
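        // The three-operand imull/imulq form writes dst = src * imm, so the result register
        // does not have to alias the first input and no output overlap is needed.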
3156 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3157 } else {
3158 locations->SetOut(Location::SameAsFirstInput());
3159 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003160 break;
3161 }
3162 case Primitive::kPrimLong: {
3163 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003164 locations->SetInAt(1, Location::Any());
3165 if (mul->InputAt(1)->IsLongConstant() &&
3166 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003167 // Can use 3 operand multiply.
3168 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3169 } else {
3170 locations->SetOut(Location::SameAsFirstInput());
3171 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003172 break;
3173 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003174 case Primitive::kPrimFloat:
3175 case Primitive::kPrimDouble: {
3176 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003177 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003178 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003179 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003180 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003181
3182 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003183 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003184 }
3185}
3186
3187void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3188 LocationSummary* locations = mul->GetLocations();
3189 Location first = locations->InAt(0);
3190 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003191 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003192 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003193 case Primitive::kPrimInt:
3194 // The constant may have ended up in a register, so test explicitly to avoid
3195 // problems where the output may not be the same as the first operand.
3196 if (mul->InputAt(1)->IsIntConstant()) {
3197 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3198 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3199 } else if (second.IsRegister()) {
3200 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003201 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003202 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003203 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003204 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003205 __ imull(first.AsRegister<CpuRegister>(),
3206 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003207 }
3208 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003209 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003210 // The constant may have ended up in a register, so test explicitly to avoid
3211 // problems where the output may not be the same as the first operand.
3212 if (mul->InputAt(1)->IsLongConstant()) {
3213 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3214 if (IsInt<32>(value)) {
3215 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3216 Immediate(static_cast<int32_t>(value)));
3217 } else {
3218 // Have to use the constant area.
3219 DCHECK(first.Equals(out));
3220 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3221 }
3222 } else if (second.IsRegister()) {
3223 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003224 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003225 } else {
3226 DCHECK(second.IsDoubleStackSlot());
3227 DCHECK(first.Equals(out));
3228 __ imulq(first.AsRegister<CpuRegister>(),
3229 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003230 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003231 break;
3232 }
3233
Calin Juravleb5bfa962014-10-21 18:02:24 +01003234 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003235 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003236 if (second.IsFpuRegister()) {
3237 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3238 } else if (second.IsConstant()) {
3239 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003240 codegen_->LiteralFloatAddress(
3241 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003242 } else {
3243 DCHECK(second.IsStackSlot());
3244 __ mulss(first.AsFpuRegister<XmmRegister>(),
3245 Address(CpuRegister(RSP), second.GetStackIndex()));
3246 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003247 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003248 }
3249
3250 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003251 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003252 if (second.IsFpuRegister()) {
3253 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3254 } else if (second.IsConstant()) {
3255 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003256 codegen_->LiteralDoubleAddress(
3257 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003258 } else {
3259 DCHECK(second.IsDoubleStackSlot());
3260 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3261 Address(CpuRegister(RSP), second.GetStackIndex()));
3262 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003263 break;
3264 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003265
3266 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003267 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003268 }
3269}
3270
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003271void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3272 uint32_t stack_adjustment, bool is_float) {
3273 if (source.IsStackSlot()) {
3274 DCHECK(is_float);
3275 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3276 } else if (source.IsDoubleStackSlot()) {
3277 DCHECK(!is_float);
3278 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3279 } else {
3280 // Write the value to the temporary location on the stack and load to FP stack.
3281 if (is_float) {
3282 Location stack_temp = Location::StackSlot(temp_offset);
3283 codegen_->Move(stack_temp, source);
3284 __ flds(Address(CpuRegister(RSP), temp_offset));
3285 } else {
3286 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3287 codegen_->Move(stack_temp, source);
3288 __ fldl(Address(CpuRegister(RSP), temp_offset));
3289 }
3290 }
3291}
3292
3293void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3294 Primitive::Type type = rem->GetResultType();
3295 bool is_float = type == Primitive::kPrimFloat;
3296 size_t elem_size = Primitive::ComponentSize(type);
3297 LocationSummary* locations = rem->GetLocations();
3298 Location first = locations->InAt(0);
3299 Location second = locations->InAt(1);
3300 Location out = locations->Out();
3301
3302 // Create stack space for 2 elements.
3303 // TODO: enhance register allocator to ask for stack temporaries.
3304 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3305
3306 // Load the values to the FP stack in reverse order, using temporaries if needed.
3307 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3308 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3309
3310 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003311 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003312 __ Bind(&retry);
3313 __ fprem();
3314
3315 // Move FP status to AX.
3316 __ fstsw();
3317
3318 // And see if the argument reduction is complete. This is signaled by the
3319 // C2 FPU flag bit set to 0.
3320 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3321 __ j(kNotEqual, &retry);
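  // fprem only produces a partial remainder when the operands' exponents are far apart;
  // in that case it sets the C2 status flag, so the loop above retries until C2 reads as zero.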
3322
3323 // We have settled on the final value. Retrieve it into an XMM register.
3324 // Store FP top of stack to real stack.
3325 if (is_float) {
3326 __ fsts(Address(CpuRegister(RSP), 0));
3327 } else {
3328 __ fstl(Address(CpuRegister(RSP), 0));
3329 }
3330
3331 // Pop the 2 items from the FP stack.
3332 __ fucompp();
3333
3334 // Load the value from the stack into an XMM register.
3335 DCHECK(out.IsFpuRegister()) << out;
3336 if (is_float) {
3337 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3338 } else {
3339 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3340 }
3341
3342 // And remove the temporary stack space we allocated.
3343 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3344}
3345
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003346void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3347 DCHECK(instruction->IsDiv() || instruction->IsRem());
3348
3349 LocationSummary* locations = instruction->GetLocations();
3350 Location second = locations->InAt(1);
3351 DCHECK(second.IsConstant());
3352
3353 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3354 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003355 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003356
3357 DCHECK(imm == 1 || imm == -1);
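  // For any x, x % 1 == 0 and x % -1 == 0, while x / -1 == -x (with the usual two's
  // complement wrap for the most negative value), which is what the code below emits.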
3358
3359 switch (instruction->GetResultType()) {
3360 case Primitive::kPrimInt: {
3361 if (instruction->IsRem()) {
3362 __ xorl(output_register, output_register);
3363 } else {
3364 __ movl(output_register, input_register);
3365 if (imm == -1) {
3366 __ negl(output_register);
3367 }
3368 }
3369 break;
3370 }
3371
3372 case Primitive::kPrimLong: {
3373 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003374 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003375 } else {
3376 __ movq(output_register, input_register);
3377 if (imm == -1) {
3378 __ negq(output_register);
3379 }
3380 }
3381 break;
3382 }
3383
3384 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003385 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003386 }
3387}
3388
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003389void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003390 LocationSummary* locations = instruction->GetLocations();
3391 Location second = locations->InAt(1);
3392
3393 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3394 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3395
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003396 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003397 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3398 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003399
3400 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3401
3402 if (instruction->GetResultType() == Primitive::kPrimInt) {
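    // Biasing a negative numerator by abs_imm - 1 makes the arithmetic shift round toward
    // zero, e.g. for imm == 4: (-7 + 3) >> 2 == -1, whereas a plain -7 >> 2 would give -2.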
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003403 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003404 __ testl(numerator, numerator);
3405 __ cmov(kGreaterEqual, tmp, numerator);
3406 int shift = CTZ(imm);
3407 __ sarl(tmp, Immediate(shift));
3408
3409 if (imm < 0) {
3410 __ negl(tmp);
3411 }
3412
3413 __ movl(output_register, tmp);
3414 } else {
3415 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3416 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3417
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003418 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003419 __ addq(rdx, numerator);
3420 __ testq(numerator, numerator);
3421 __ cmov(kGreaterEqual, rdx, numerator);
3422 int shift = CTZ(imm);
3423 __ sarq(rdx, Immediate(shift));
3424
3425 if (imm < 0) {
3426 __ negq(rdx);
3427 }
3428
3429 __ movq(output_register, rdx);
3430 }
3431}
3432
3433void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3434 DCHECK(instruction->IsDiv() || instruction->IsRem());
3435
3436 LocationSummary* locations = instruction->GetLocations();
3437 Location second = locations->InAt(1);
3438
3439 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3440 : locations->GetTemp(0).AsRegister<CpuRegister>();
3441 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3442 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3443 : locations->Out().AsRegister<CpuRegister>();
3444 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3445
3446 DCHECK_EQ(RAX, eax.AsRegister());
3447 DCHECK_EQ(RDX, edx.AsRegister());
3448 if (instruction->IsDiv()) {
3449 DCHECK_EQ(RAX, out.AsRegister());
3450 } else {
3451 DCHECK_EQ(RDX, out.AsRegister());
3452 }
3453
3454 int64_t magic;
3455 int shift;
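  // The quotient is computed as the high half of a widening multiply by a precomputed
  // 'magic' reciprocal, then corrected by a shift and a sign fix-up. For instance, for int
  // division by 3 the Hacker's Delight constants are magic = 0x55555556 and shift = 0:
  // 7 / 3 -> hi32(0x55555556 * 7) == 2, and -7 / 3 -> hi32(0x55555556 * -7) + 1 == -2.
  // The constants actually used here come from CalculateMagicAndShiftForDivRem.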
3456
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003457 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003458 if (instruction->GetResultType() == Primitive::kPrimInt) {
3459 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3460
3461 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3462
3463 __ movl(numerator, eax);
3464
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003465 __ movl(eax, Immediate(magic));
3466 __ imull(numerator);
3467
3468 if (imm > 0 && magic < 0) {
3469 __ addl(edx, numerator);
3470 } else if (imm < 0 && magic > 0) {
3471 __ subl(edx, numerator);
3472 }
3473
3474 if (shift != 0) {
3475 __ sarl(edx, Immediate(shift));
3476 }
3477
3478 __ movl(eax, edx);
3479 __ shrl(edx, Immediate(31));
3480 __ addl(edx, eax);
3481
3482 if (instruction->IsRem()) {
3483 __ movl(eax, numerator);
3484 __ imull(edx, Immediate(imm));
3485 __ subl(eax, edx);
3486 __ movl(edx, eax);
3487 } else {
3488 __ movl(eax, edx);
3489 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003490 } else {
3491 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3492
3493 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3494
3495 CpuRegister rax = eax;
3496 CpuRegister rdx = edx;
3497
3498 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3499
3500 // Save the numerator.
3501 __ movq(numerator, rax);
3502
3503 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003504 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003505
3506 // RDX:RAX = magic * numerator
3507 __ imulq(numerator);
3508
3509 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003510 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003511 __ addq(rdx, numerator);
3512 } else if (imm < 0 && magic > 0) {
3513 // RDX -= numerator
3514 __ subq(rdx, numerator);
3515 }
3516
3517 // Shift if needed.
3518 if (shift != 0) {
3519 __ sarq(rdx, Immediate(shift));
3520 }
3521
3522 // RDX += 1 if RDX < 0
3523 __ movq(rax, rdx);
3524 __ shrq(rdx, Immediate(63));
3525 __ addq(rdx, rax);
3526
3527 if (instruction->IsRem()) {
3528 __ movq(rax, numerator);
3529
3530 if (IsInt<32>(imm)) {
3531 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3532 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003533 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003534 }
3535
3536 __ subq(rax, rdx);
3537 __ movq(rdx, rax);
3538 } else {
3539 __ movq(rax, rdx);
3540 }
3541 }
3542}
3543
Calin Juravlebacfec32014-11-14 15:54:36 +00003544void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3545 DCHECK(instruction->IsDiv() || instruction->IsRem());
3546 Primitive::Type type = instruction->GetResultType();
 3547  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3548
3549 bool is_div = instruction->IsDiv();
3550 LocationSummary* locations = instruction->GetLocations();
3551
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003552 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3553 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003554
Roland Levillain271ab9c2014-11-27 15:23:57 +00003555 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003556 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003557
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003558 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003559 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003560
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003561 if (imm == 0) {
 3562      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3563 } else if (imm == 1 || imm == -1) {
3564 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003565 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003566 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003567 } else {
3568 DCHECK(imm <= -2 || imm >= 2);
3569 GenerateDivRemWithAnyConstant(instruction);
3570 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003571 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003572 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003573 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003574 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003575 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003576
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003577 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3578 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
 3579    // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000)
3580 // so it's safe to just use negl instead of more complex comparisons.
3581 if (type == Primitive::kPrimInt) {
3582 __ cmpl(second_reg, Immediate(-1));
3583 __ j(kEqual, slow_path->GetEntryLabel());
3584 // edx:eax <- sign-extended of eax
3585 __ cdq();
3586 // eax = quotient, edx = remainder
3587 __ idivl(second_reg);
3588 } else {
3589 __ cmpq(second_reg, Immediate(-1));
3590 __ j(kEqual, slow_path->GetEntryLabel());
3591 // rdx:rax <- sign-extended of rax
3592 __ cqo();
3593 // rax = quotient, rdx = remainder
3594 __ idivq(second_reg);
3595 }
3596 __ Bind(slow_path->GetExitLabel());
3597 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003598}
3599
Calin Juravle7c4954d2014-10-28 16:57:40 +00003600void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3601 LocationSummary* locations =
3602 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3603 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003604 case Primitive::kPrimInt:
3605 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003606 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003607 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003608 locations->SetOut(Location::SameAsFirstInput());
 3609      // Intel uses edx:eax (rdx:rax for long) as the dividend.
3610 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003611      // We need to save the numerator while we tweak RAX and RDX. Since imul forces its
 3612      // results into RAX and RDX, things are simpler if we also use the output register as a
 3613      // working register and request another temp to hold the numerator.
3614 if (div->InputAt(1)->IsConstant()) {
3615 locations->AddTemp(Location::RequiresRegister());
3616 }
Calin Juravled0d48522014-11-04 16:40:20 +00003617 break;
3618 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003619
Calin Juravle7c4954d2014-10-28 16:57:40 +00003620 case Primitive::kPrimFloat:
3621 case Primitive::kPrimDouble: {
3622 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003623 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003624 locations->SetOut(Location::SameAsFirstInput());
3625 break;
3626 }
3627
3628 default:
3629 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3630 }
3631}
3632
3633void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3634 LocationSummary* locations = div->GetLocations();
3635 Location first = locations->InAt(0);
3636 Location second = locations->InAt(1);
3637 DCHECK(first.Equals(locations->Out()));
3638
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003639 Primitive::Type type = div->GetResultType();
3640 switch (type) {
3641 case Primitive::kPrimInt:
3642 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003643 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003644 break;
3645 }
3646
Calin Juravle7c4954d2014-10-28 16:57:40 +00003647 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003648 if (second.IsFpuRegister()) {
3649 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3650 } else if (second.IsConstant()) {
3651 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003652 codegen_->LiteralFloatAddress(
3653 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003654 } else {
3655 DCHECK(second.IsStackSlot());
3656 __ divss(first.AsFpuRegister<XmmRegister>(),
3657 Address(CpuRegister(RSP), second.GetStackIndex()));
3658 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003659 break;
3660 }
3661
3662 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003663 if (second.IsFpuRegister()) {
3664 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3665 } else if (second.IsConstant()) {
3666 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003667 codegen_->LiteralDoubleAddress(
3668 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003669 } else {
3670 DCHECK(second.IsDoubleStackSlot());
3671 __ divsd(first.AsFpuRegister<XmmRegister>(),
3672 Address(CpuRegister(RSP), second.GetStackIndex()));
3673 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003674 break;
3675 }
3676
3677 default:
3678 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3679 }
3680}
3681
Calin Juravlebacfec32014-11-14 15:54:36 +00003682void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003683 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003684 LocationSummary* locations =
3685 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003686
3687 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003688 case Primitive::kPrimInt:
3689 case Primitive::kPrimLong: {
3690 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003691 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003692      // Intel uses rdx:rax (edx:eax for int) as the dividend and puts the remainder in rdx.
3693 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003694      // We need to save the numerator while we tweak RAX and RDX. Since imul forces its
 3695      // results into RAX and RDX, things are simpler if we also use the output register as a
 3696      // working register and request another temp to hold the numerator.
3697 if (rem->InputAt(1)->IsConstant()) {
3698 locations->AddTemp(Location::RequiresRegister());
3699 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003700 break;
3701 }
3702
3703 case Primitive::kPrimFloat:
3704 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003705 locations->SetInAt(0, Location::Any());
3706 locations->SetInAt(1, Location::Any());
3707 locations->SetOut(Location::RequiresFpuRegister());
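      // RAX is reserved because GenerateRemFP reads the x87 status word into AX via fstsw
      // while iterating on fprem.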
3708 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003709 break;
3710 }
3711
3712 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003713 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003714 }
3715}
3716
3717void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3718 Primitive::Type type = rem->GetResultType();
3719 switch (type) {
3720 case Primitive::kPrimInt:
3721 case Primitive::kPrimLong: {
3722 GenerateDivRemIntegral(rem);
3723 break;
3724 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003725 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003726 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003727 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003728 break;
3729 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003730 default:
3731 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3732 }
3733}
3734
Calin Juravled0d48522014-11-04 16:40:20 +00003735void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003736 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3737 ? LocationSummary::kCallOnSlowPath
3738 : LocationSummary::kNoCall;
3739 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003740 locations->SetInAt(0, Location::Any());
3741 if (instruction->HasUses()) {
3742 locations->SetOut(Location::SameAsFirstInput());
3743 }
3744}
3745
3746void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003747 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003748 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3749 codegen_->AddSlowPath(slow_path);
3750
3751 LocationSummary* locations = instruction->GetLocations();
3752 Location value = locations->InAt(0);
3753
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003754 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003755 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003756 case Primitive::kPrimByte:
3757 case Primitive::kPrimChar:
3758 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003759 case Primitive::kPrimInt: {
3760 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003761 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003762 __ j(kEqual, slow_path->GetEntryLabel());
3763 } else if (value.IsStackSlot()) {
3764 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3765 __ j(kEqual, slow_path->GetEntryLabel());
3766 } else {
3767 DCHECK(value.IsConstant()) << value;
3768 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3769 __ jmp(slow_path->GetEntryLabel());
3770 }
3771 }
3772 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003773 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003774 case Primitive::kPrimLong: {
3775 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003776 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003777 __ j(kEqual, slow_path->GetEntryLabel());
3778 } else if (value.IsDoubleStackSlot()) {
3779 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3780 __ j(kEqual, slow_path->GetEntryLabel());
3781 } else {
3782 DCHECK(value.IsConstant()) << value;
3783 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3784 __ jmp(slow_path->GetEntryLabel());
3785 }
3786 }
3787 break;
3788 }
3789 default:
3790 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003791 }
Calin Juravled0d48522014-11-04 16:40:20 +00003792}
3793
Calin Juravle9aec02f2014-11-18 23:06:35 +00003794void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3795 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3796
3797 LocationSummary* locations =
3798 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3799
3800 switch (op->GetResultType()) {
3801 case Primitive::kPrimInt:
3802 case Primitive::kPrimLong: {
3803 locations->SetInAt(0, Location::RequiresRegister());
3804 // The shift count needs to be in CL.
3805 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3806 locations->SetOut(Location::SameAsFirstInput());
3807 break;
3808 }
3809 default:
3810 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3811 }
3812}
3813
3814void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3815 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3816
3817 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003818 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003819 Location second = locations->InAt(1);
3820
3821 switch (op->GetResultType()) {
3822 case Primitive::kPrimInt: {
3823 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003824 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003825 if (op->IsShl()) {
3826 __ shll(first_reg, second_reg);
3827 } else if (op->IsShr()) {
3828 __ sarl(first_reg, second_reg);
3829 } else {
3830 __ shrl(first_reg, second_reg);
3831 }
3832 } else {
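        // Only the low five bits (six for long shifts) of the count are significant, matching
        // the Java shift semantics as well as the hardware behavior of shl/sar/shr.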
Roland Levillain5b5b9312016-03-22 14:57:31 +00003833 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003834 if (op->IsShl()) {
3835 __ shll(first_reg, imm);
3836 } else if (op->IsShr()) {
3837 __ sarl(first_reg, imm);
3838 } else {
3839 __ shrl(first_reg, imm);
3840 }
3841 }
3842 break;
3843 }
3844 case Primitive::kPrimLong: {
3845 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003846 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003847 if (op->IsShl()) {
3848 __ shlq(first_reg, second_reg);
3849 } else if (op->IsShr()) {
3850 __ sarq(first_reg, second_reg);
3851 } else {
3852 __ shrq(first_reg, second_reg);
3853 }
3854 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003855 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003856 if (op->IsShl()) {
3857 __ shlq(first_reg, imm);
3858 } else if (op->IsShr()) {
3859 __ sarq(first_reg, imm);
3860 } else {
3861 __ shrq(first_reg, imm);
3862 }
3863 }
3864 break;
3865 }
3866 default:
3867 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003868 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003869 }
3870}
3871
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003872void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3873 LocationSummary* locations =
3874 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3875
3876 switch (ror->GetResultType()) {
3877 case Primitive::kPrimInt:
3878 case Primitive::kPrimLong: {
3879 locations->SetInAt(0, Location::RequiresRegister());
3880 // The shift count needs to be in CL (unless it is a constant).
3881 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3882 locations->SetOut(Location::SameAsFirstInput());
3883 break;
3884 }
3885 default:
3886 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3887 UNREACHABLE();
3888 }
3889}
3890
3891void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3892 LocationSummary* locations = ror->GetLocations();
3893 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3894 Location second = locations->InAt(1);
3895
3896 switch (ror->GetResultType()) {
3897 case Primitive::kPrimInt:
3898 if (second.IsRegister()) {
3899 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3900 __ rorl(first_reg, second_reg);
3901 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003902 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003903 __ rorl(first_reg, imm);
3904 }
3905 break;
3906 case Primitive::kPrimLong:
3907 if (second.IsRegister()) {
3908 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3909 __ rorq(first_reg, second_reg);
3910 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003911 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003912 __ rorq(first_reg, imm);
3913 }
3914 break;
3915 default:
3916 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3917 UNREACHABLE();
3918 }
3919}
3920
Calin Juravle9aec02f2014-11-18 23:06:35 +00003921void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3922 HandleShift(shl);
3923}
3924
3925void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3926 HandleShift(shl);
3927}
3928
3929void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3930 HandleShift(shr);
3931}
3932
3933void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3934 HandleShift(shr);
3935}
3936
3937void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3938 HandleShift(ushr);
3939}
3940
3941void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3942 HandleShift(ushr);
3943}
3944
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003945void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003946 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003947 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003948 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003949 if (instruction->IsStringAlloc()) {
3950 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3951 } else {
3952 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3953 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3954 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003955 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003956}
3957
3958void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003959  // Note: if heap poisoning is enabled, the entry point takes care
3960 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003961 if (instruction->IsStringAlloc()) {
3962 // String is allocated through StringFactory. Call NewEmptyString entry point.
3963 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3964 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3965 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
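    // Quick entrypoints live in the Thread object, which x86-64 addresses through the GS
    // segment register, hence the gs()-prefixed absolute address.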
3966 __ call(Address(temp, code_offset.SizeValue()));
3967 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3968 } else {
3969 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3970 instruction,
3971 instruction->GetDexPc(),
3972 nullptr);
3973 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3974 DCHECK(!codegen_->IsLeafMethod());
3975 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003976}
3977
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003978void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3979 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003980 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003981 InvokeRuntimeCallingConvention calling_convention;
3982 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003983 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003984 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003985 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003986}
3987
3988void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3989 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003990 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3991 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003992  // Note: if heap poisoning is enabled, the entry point takes care
3993 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003994 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3995 instruction,
3996 instruction->GetDexPc(),
3997 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003998 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003999
4000 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004001}
4002
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004003void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004004 LocationSummary* locations =
4005 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004006 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4007 if (location.IsStackSlot()) {
4008 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4009 } else if (location.IsDoubleStackSlot()) {
4010 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4011 }
4012 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004013}
4014
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004015void InstructionCodeGeneratorX86_64::VisitParameterValue(
4016 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004017 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004018}
4019
4020void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4021 LocationSummary* locations =
4022 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4023 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4024}
4025
4026void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
4027 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4028 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004029}
4030
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004031void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4032 LocationSummary* locations =
4033 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4034 locations->SetInAt(0, Location::RequiresRegister());
4035 locations->SetOut(Location::RequiresRegister());
4036}
4037
4038void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4039 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00004040 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004041 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004042 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004043 __ movq(locations->Out().AsRegister<CpuRegister>(),
4044 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004045 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004046 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
4047 instruction->GetIndex() % ImTable::kSize, kX86_64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004048 __ movq(locations->Out().AsRegister<CpuRegister>(),
4049 Address(locations->InAt(0).AsRegister<CpuRegister>(),
4050 mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004051 __ movq(locations->Out().AsRegister<CpuRegister>(),
4052 Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004053 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004054}
4055
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004056void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004057 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004058 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004059 locations->SetInAt(0, Location::RequiresRegister());
4060 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004061}
4062
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004063void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4064 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004065 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4066 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004067 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004068 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004069 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004070 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004071 break;
4072
4073 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004074 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004075 break;
4076
4077 default:
4078 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4079 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004080}
4081
David Brazdil66d126e2015-04-03 16:02:44 +01004082void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4083 LocationSummary* locations =
4084 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4085 locations->SetInAt(0, Location::RequiresRegister());
4086 locations->SetOut(Location::SameAsFirstInput());
4087}
4088
4089void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004090 LocationSummary* locations = bool_not->GetLocations();
4091 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4092 locations->Out().AsRegister<CpuRegister>().AsRegister());
4093 Location out = locations->Out();
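  // Boolean values are materialized as 0 or 1, so flipping the low bit negates the input.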
4094 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4095}
4096
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004097void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004098 LocationSummary* locations =
4099 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004100 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004101 locations->SetInAt(i, Location::Any());
4102 }
4103 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004104}
4105
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004106void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004107 LOG(FATAL) << "Unimplemented";
4108}
4109
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004110void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004111 /*
 4112   * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004113 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004114 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4115 */
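  // In the JSR-133 mapping, a volatile store is followed by a StoreLoad barrier and a
  // volatile load by LoadLoad/LoadStore barriers; on x86-64 only the StoreLoad (kAnyAny)
  // case below needs an actual fence instruction.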
4116 switch (kind) {
4117 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004118 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004119 break;
4120 }
4121 case MemBarrierKind::kAnyStore:
4122 case MemBarrierKind::kLoadAny:
4123 case MemBarrierKind::kStoreStore: {
4124 // nop
4125 break;
4126 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004127 case MemBarrierKind::kNTStoreStore:
4128 // Non-Temporal Store/Store needs an explicit fence.
4129 MemoryFence(/* non-temporal */ true);
4130 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004131 }
4132}
4133
4134void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4135 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4136
Roland Levillain0d5a2812015-11-13 10:07:31 +00004137 bool object_field_get_with_read_barrier =
4138 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004139 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004140 new (GetGraph()->GetArena()) LocationSummary(instruction,
4141 object_field_get_with_read_barrier ?
4142 LocationSummary::kCallOnSlowPath :
4143 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004144 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004145 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4146 locations->SetOut(Location::RequiresFpuRegister());
4147 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004148 // The output overlaps for an object field get when read barriers
4149 // are enabled: we do not want the move to overwrite the object's
4150 // location, as we need it to emit the read barrier.
4151 locations->SetOut(
4152 Location::RequiresRegister(),
4153 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004154 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004155 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4156 // We need a temporary register for the read barrier marking slow
4157 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4158 locations->AddTemp(Location::RequiresRegister());
4159 }
Calin Juravle52c48962014-12-16 17:02:57 +00004160}
4161
4162void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4163 const FieldInfo& field_info) {
4164 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4165
4166 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004167 Location base_loc = locations->InAt(0);
4168 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004169 Location out = locations->Out();
4170 bool is_volatile = field_info.IsVolatile();
4171 Primitive::Type field_type = field_info.GetFieldType();
4172 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4173
4174 switch (field_type) {
4175 case Primitive::kPrimBoolean: {
4176 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4177 break;
4178 }
4179
4180 case Primitive::kPrimByte: {
4181 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4182 break;
4183 }
4184
4185 case Primitive::kPrimShort: {
4186 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4187 break;
4188 }
4189
4190 case Primitive::kPrimChar: {
4191 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4192 break;
4193 }
4194
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004195 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004196 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4197 break;
4198 }
4199
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004200 case Primitive::kPrimNot: {
4201 // /* HeapReference<Object> */ out = *(base + offset)
4202 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4203 Location temp_loc = locations->GetTemp(0);
4204 // Note that a potential implicit null check is handled in this
 4205        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4206 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4207 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4208 if (is_volatile) {
4209 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4210 }
4211 } else {
4212 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4213 codegen_->MaybeRecordImplicitNullCheck(instruction);
4214 if (is_volatile) {
4215 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4216 }
4217 // If read barriers are enabled, emit read barriers other than
4218 // Baker's using a slow path (and also unpoison the loaded
4219 // reference, if heap poisoning is enabled).
4220 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4221 }
4222 break;
4223 }
4224
Calin Juravle52c48962014-12-16 17:02:57 +00004225 case Primitive::kPrimLong: {
4226 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4227 break;
4228 }
4229
4230 case Primitive::kPrimFloat: {
4231 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4232 break;
4233 }
4234
4235 case Primitive::kPrimDouble: {
4236 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4237 break;
4238 }
4239
4240 case Primitive::kPrimVoid:
4241 LOG(FATAL) << "Unreachable type " << field_type;
4242 UNREACHABLE();
4243 }
4244
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004245 if (field_type == Primitive::kPrimNot) {
4246 // Potential implicit null checks, in the case of reference
4247 // fields, are handled in the previous switch statement.
4248 } else {
4249 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004250 }
Roland Levillain4d027112015-07-01 15:41:14 +01004251
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004252 if (is_volatile) {
4253 if (field_type == Primitive::kPrimNot) {
4254 // Memory barriers, in the case of references, are also handled
4255 // in the previous switch statement.
4256 } else {
4257 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4258 }
Roland Levillain4d027112015-07-01 15:41:14 +01004259 }
Calin Juravle52c48962014-12-16 17:02:57 +00004260}
4261
4262void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4263 const FieldInfo& field_info) {
4264 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4265
4266 LocationSummary* locations =
4267 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004268 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004269 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004270 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004271 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004272
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004273 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004274 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004275 if (is_volatile) {
4276 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4277 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4278 } else {
4279 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4280 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004281 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004282 if (is_volatile) {
4283 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4284 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4285 } else {
4286 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4287 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004288 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004289 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004290 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004291 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004292 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004293 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4294 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004295 locations->AddTemp(Location::RequiresRegister());
4296 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004297}
4298
Calin Juravle52c48962014-12-16 17:02:57 +00004299void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004300 const FieldInfo& field_info,
4301 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004302 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4303
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004304 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004305 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4306 Location value = locations->InAt(1);
4307 bool is_volatile = field_info.IsVolatile();
4308 Primitive::Type field_type = field_info.GetFieldType();
4309 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4310
4311 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004312 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004313 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004314
Mark Mendellea5af682015-10-22 17:35:49 -04004315 bool maybe_record_implicit_null_check_done = false;
4316
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004317 switch (field_type) {
4318 case Primitive::kPrimBoolean:
4319 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004320 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004321 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004322 __ movb(Address(base, offset), Immediate(v));
4323 } else {
4324 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4325 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004326 break;
4327 }
4328
4329 case Primitive::kPrimShort:
4330 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004331 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004332 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004333 __ movw(Address(base, offset), Immediate(v));
4334 } else {
4335 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4336 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004337 break;
4338 }
4339
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004340 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004341 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004342 if (value.IsConstant()) {
4343 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004344 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4345 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4346 // Note: if heap poisoning is enabled, no need to poison
4347 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004348 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004349 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004350 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4351 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4352 __ movl(temp, value.AsRegister<CpuRegister>());
4353 __ PoisonHeapReference(temp);
4354 __ movl(Address(base, offset), temp);
4355 } else {
4356 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4357 }
Mark Mendell40741f32015-04-20 22:10:34 -04004358 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004359 break;
4360 }
4361
4362 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004363 if (value.IsConstant()) {
4364 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
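        // A sketch of what MoveInt64ToAddress (defined elsewhere in this file) is expected to
        // emit: a single `movq [addr], imm32` when `v` is sign-extendable from 32 bits, or two
        // 32-bit immediate stores (low word, then high word at offset + 4). Either way the
        // helper records the implicit null check itself, hence the flag set below.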
Mark Mendellea5af682015-10-22 17:35:49 -04004365 codegen_->MoveInt64ToAddress(Address(base, offset),
4366 Address(base, offset + sizeof(int32_t)),
4367 v,
4368 instruction);
4369 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004370 } else {
4371 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4372 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004373 break;
4374 }
4375
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004376 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004377 if (value.IsConstant()) {
4378 int32_t v =
4379 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4380 __ movl(Address(base, offset), Immediate(v));
4381 } else {
4382 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4383 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004384 break;
4385 }
4386
4387 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004388 if (value.IsConstant()) {
4389 int64_t v =
4390 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4391 codegen_->MoveInt64ToAddress(Address(base, offset),
4392 Address(base, offset + sizeof(int32_t)),
4393 v,
4394 instruction);
4395 maybe_record_implicit_null_check_done = true;
4396 } else {
4397 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4398 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004399 break;
4400 }
4401
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004402 case Primitive::kPrimVoid:
4403 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004404 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004405 }
Calin Juravle52c48962014-12-16 17:02:57 +00004406
Mark Mendellea5af682015-10-22 17:35:49 -04004407 if (!maybe_record_implicit_null_check_done) {
4408 codegen_->MaybeRecordImplicitNullCheck(instruction);
4409 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004410
4411 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4412 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4413 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004414 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004415 }
4416
Calin Juravle52c48962014-12-16 17:02:57 +00004417 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004418 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004419 }
4420}
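// To summarize HandleFieldSet above for a volatile reference field needing a write barrier,
// the expected sequence is roughly:
//   <kAnyStore barrier>               // before the store
//   movl [<base> + offset], <value>   // single-instruction store (a poisoned copy in a temp
//                                     // register is stored instead when kPoisonHeapReferences)
//   ...card marking via MarkGCCard... // skipped for null values when value_can_be_null
//   <kAnyAny barrier>                 // after the store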
4421
4422void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4423 HandleFieldSet(instruction, instruction->GetFieldInfo());
4424}
4425
4426void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004427 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004428}
4429
4430void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004431 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004432}
4433
4434void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004435 HandleFieldGet(instruction, instruction->GetFieldInfo());
4436}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004437
Calin Juravle52c48962014-12-16 17:02:57 +00004438void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4439 HandleFieldGet(instruction);
4440}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004441
Calin Juravle52c48962014-12-16 17:02:57 +00004442void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4443 HandleFieldGet(instruction, instruction->GetFieldInfo());
4444}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004445
Calin Juravle52c48962014-12-16 17:02:57 +00004446void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4447 HandleFieldSet(instruction, instruction->GetFieldInfo());
4448}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004449
Calin Juravle52c48962014-12-16 17:02:57 +00004450void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004451 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004452}
4453
Calin Juravlee460d1d2015-09-29 04:52:17 +01004454void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4455 HUnresolvedInstanceFieldGet* instruction) {
4456 FieldAccessCallingConventionX86_64 calling_convention;
4457 codegen_->CreateUnresolvedFieldLocationSummary(
4458 instruction, instruction->GetFieldType(), calling_convention);
4459}
4460
4461void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4462 HUnresolvedInstanceFieldGet* instruction) {
4463 FieldAccessCallingConventionX86_64 calling_convention;
4464 codegen_->GenerateUnresolvedFieldAccess(instruction,
4465 instruction->GetFieldType(),
4466 instruction->GetFieldIndex(),
4467 instruction->GetDexPc(),
4468 calling_convention);
4469}
4470
4471void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4472 HUnresolvedInstanceFieldSet* instruction) {
4473 FieldAccessCallingConventionX86_64 calling_convention;
4474 codegen_->CreateUnresolvedFieldLocationSummary(
4475 instruction, instruction->GetFieldType(), calling_convention);
4476}
4477
4478void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4479 HUnresolvedInstanceFieldSet* instruction) {
4480 FieldAccessCallingConventionX86_64 calling_convention;
4481 codegen_->GenerateUnresolvedFieldAccess(instruction,
4482 instruction->GetFieldType(),
4483 instruction->GetFieldIndex(),
4484 instruction->GetDexPc(),
4485 calling_convention);
4486}
4487
4488void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4489 HUnresolvedStaticFieldGet* instruction) {
4490 FieldAccessCallingConventionX86_64 calling_convention;
4491 codegen_->CreateUnresolvedFieldLocationSummary(
4492 instruction, instruction->GetFieldType(), calling_convention);
4493}
4494
4495void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4496 HUnresolvedStaticFieldGet* instruction) {
4497 FieldAccessCallingConventionX86_64 calling_convention;
4498 codegen_->GenerateUnresolvedFieldAccess(instruction,
4499 instruction->GetFieldType(),
4500 instruction->GetFieldIndex(),
4501 instruction->GetDexPc(),
4502 calling_convention);
4503}
4504
4505void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4506 HUnresolvedStaticFieldSet* instruction) {
4507 FieldAccessCallingConventionX86_64 calling_convention;
4508 codegen_->CreateUnresolvedFieldLocationSummary(
4509 instruction, instruction->GetFieldType(), calling_convention);
4510}
4511
4512void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4513 HUnresolvedStaticFieldSet* instruction) {
4514 FieldAccessCallingConventionX86_64 calling_convention;
4515 codegen_->GenerateUnresolvedFieldAccess(instruction,
4516 instruction->GetFieldType(),
4517 instruction->GetFieldIndex(),
4518 instruction->GetDexPc(),
4519 calling_convention);
4520}
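// The Unresolved{Instance,Static}Field{Get,Set} visitors above deal with fields whose offset
// is unknown at compile time: they set up the field-access calling convention and delegate to
// the shared CodeGenerator helpers, which are expected to emit a runtime call that resolves
// the field and performs the access.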
4521
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004522void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004523 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4524 ? LocationSummary::kCallOnSlowPath
4525 : LocationSummary::kNoCall;
4526 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4527 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004528 ? Location::RequiresRegister()
4529 : Location::Any();
4530 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004531 if (instruction->HasUses()) {
4532 locations->SetOut(Location::SameAsFirstInput());
4533 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004534}
4535
Calin Juravle2ae48182016-03-16 14:05:09 +00004536void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4537 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004538 return;
4539 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004540 LocationSummary* locations = instruction->GetLocations();
4541 Location obj = locations->InAt(0);
4542
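  // This is a load that faults if `obj` is null: `testl` reads one word from [obj + 0] and
  // only updates the flags, so RAX is merely a dummy operand and is not written. The PC
  // recorded below lets the fault handler turn the resulting signal into a
  // NullPointerException.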
4543 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004544 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004545}
4546
Calin Juravle2ae48182016-03-16 14:05:09 +00004547void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004548 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004549 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004550
4551 LocationSummary* locations = instruction->GetLocations();
4552 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004553
4554 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004555 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004556 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004557 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004558 } else {
4559 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004560 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004561 __ jmp(slow_path->GetEntryLabel());
4562 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004563 }
4564 __ j(kEqual, slow_path->GetEntryLabel());
4565}
4566
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004567void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004568 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004569}
4570
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004571void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004572 bool object_array_get_with_read_barrier =
4573 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004574 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004575 new (GetGraph()->GetArena()) LocationSummary(instruction,
4576 object_array_get_with_read_barrier ?
4577 LocationSummary::kCallOnSlowPath :
4578 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004579 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004580 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004581 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4582 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4583 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004584 // The output overlaps for an object array get when read barriers
4585 // are enabled: we do not want the move to overwrite the array's
4586 // location, as we need it to emit the read barrier.
4587 locations->SetOut(
4588 Location::RequiresRegister(),
4589 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004590 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004591 // We need a temporary register for the read barrier marking slow
4592 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4593 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4594 locations->AddTemp(Location::RequiresRegister());
4595 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004596}
4597
4598void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4599 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004600 Location obj_loc = locations->InAt(0);
4601 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004602 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004603 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004604 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004605
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004606 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004607 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004608 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004609 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004610 if (index.IsConstant()) {
4611 __ movzxb(out, Address(obj,
4612 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4613 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004614 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004615 }
4616 break;
4617 }
4618
4619 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004620 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004621 if (index.IsConstant()) {
4622 __ movsxb(out, Address(obj,
4623 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4624 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004625 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004626 }
4627 break;
4628 }
4629
4630 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004631 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004632 if (index.IsConstant()) {
4633 __ movsxw(out, Address(obj,
4634 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4635 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004636 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004637 }
4638 break;
4639 }
4640
4641 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004642 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004643 if (index.IsConstant()) {
4644 __ movzxw(out, Address(obj,
4645 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4646 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004647 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004648 }
4649 break;
4650 }
4651
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004652 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004653 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004654 if (index.IsConstant()) {
4655 __ movl(out, Address(obj,
4656 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4657 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004658 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004659 }
4660 break;
4661 }
4662
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004663 case Primitive::kPrimNot: {
4664 static_assert(
4665 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4666 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004667 // /* HeapReference<Object> */ out =
4668 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4669 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4670 Location temp = locations->GetTemp(0);
4671 // Note that a potential implicit null check is handled in this
4672        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4673 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4674 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4675 } else {
4676 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4677 if (index.IsConstant()) {
4678 uint32_t offset =
4679 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4680 __ movl(out, Address(obj, offset));
4681 codegen_->MaybeRecordImplicitNullCheck(instruction);
4682 // If read barriers are enabled, emit read barriers other than
4683 // Baker's using a slow path (and also unpoison the loaded
4684 // reference, if heap poisoning is enabled).
4685 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4686 } else {
4687 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4688 codegen_->MaybeRecordImplicitNullCheck(instruction);
4689 // If read barriers are enabled, emit read barriers other than
4690 // Baker's using a slow path (and also unpoison the loaded
4691 // reference, if heap poisoning is enabled).
4692 codegen_->MaybeGenerateReadBarrierSlow(
4693 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4694 }
4695 }
4696 break;
4697 }
4698
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004699 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004700 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004701 if (index.IsConstant()) {
4702 __ movq(out, Address(obj,
4703 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4704 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004705 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004706 }
4707 break;
4708 }
4709
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004710 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004711 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004712 if (index.IsConstant()) {
4713 __ movss(out, Address(obj,
4714 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4715 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004716 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004717 }
4718 break;
4719 }
4720
4721 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004722 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004723 if (index.IsConstant()) {
4724 __ movsd(out, Address(obj,
4725 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4726 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004727 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004728 }
4729 break;
4730 }
4731
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004732 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004733 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004734 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004735 }
Roland Levillain4d027112015-07-01 15:41:14 +01004736
4737 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004738 // Potential implicit null checks, in the case of reference
4739 // arrays, are handled in the previous switch statement.
4740 } else {
4741 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004742 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004743}
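// Worked example for VisitArrayGet above (a sketch, assuming no read barriers): an int element
// loaded with a register index lowers to a single scaled-index instruction,
//   movl <out>, [<obj> + <index>*4 + data_offset]
// after which the PC is recorded so that a fault on a null `obj` acts as the implicit null
// check.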
4744
4745void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004746 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004747
4748 bool needs_write_barrier =
4749 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004750 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004751 bool object_array_set_with_read_barrier =
4752 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004753
Nicolas Geoffray39468442014-09-02 15:17:15 +01004754 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004755 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004756 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004757 LocationSummary::kCallOnSlowPath :
4758 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004759
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004760 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004761 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4762 if (Primitive::IsFloatingPointType(value_type)) {
4763 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004764 } else {
4765 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4766 }
4767
4768 if (needs_write_barrier) {
4769 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004770
4771 // This first temporary register is possibly used for heap
4772 // reference poisoning and/or read barrier emission too.
4773 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004774 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004775 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004776}
4777
4778void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4779 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004780 Location array_loc = locations->InAt(0);
4781 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004782 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004783 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004784 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004785 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004786 bool needs_write_barrier =
4787 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004788 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4789 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4790 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004791
4792 switch (value_type) {
4793 case Primitive::kPrimBoolean:
4794 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004795 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4796 Address address = index.IsConstant()
4797 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4798 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4799 if (value.IsRegister()) {
4800 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004801 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004802 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004803 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004804 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004805 break;
4806 }
4807
4808 case Primitive::kPrimShort:
4809 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004810 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4811 Address address = index.IsConstant()
4812 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4813 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4814 if (value.IsRegister()) {
4815 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004816 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004817 DCHECK(value.IsConstant()) << value;
4818 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004819 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004820 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004821 break;
4822 }
4823
4824 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004825 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4826 Address address = index.IsConstant()
4827 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4828 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004829
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004830 if (!value.IsRegister()) {
4831 // Just setting null.
4832 DCHECK(instruction->InputAt(2)->IsNullConstant());
4833 DCHECK(value.IsConstant()) << value;
4834 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004835 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004836 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004837 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004838 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004839 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004840
4841 DCHECK(needs_write_barrier);
4842 CpuRegister register_value = value.AsRegister<CpuRegister>();
4843 NearLabel done, not_null, do_put;
4844 SlowPathCode* slow_path = nullptr;
4845 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004846 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004847 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4848 codegen_->AddSlowPath(slow_path);
4849 if (instruction->GetValueCanBeNull()) {
4850 __ testl(register_value, register_value);
4851 __ j(kNotEqual, &not_null);
4852 __ movl(address, Immediate(0));
4853 codegen_->MaybeRecordImplicitNullCheck(instruction);
4854 __ jmp(&done);
4855 __ Bind(&not_null);
4856 }
4857
Roland Levillain0d5a2812015-11-13 10:07:31 +00004858 if (kEmitCompilerReadBarrier) {
4859 // When read barriers are enabled, the type checking
4860 // instrumentation requires two read barriers:
4861 //
4862 // __ movl(temp2, temp);
4863 // // /* HeapReference<Class> */ temp = temp->component_type_
4864 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004865 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004866 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4867 //
4868 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4869 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004870 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004871 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4872 //
4873 // __ cmpl(temp, temp2);
4874 //
4875 // However, the second read barrier may trash `temp`, as it
4876 // is a temporary register, and as such would not be saved
4877 // along with live registers before calling the runtime (nor
4878 // restored afterwards). So in this case, we bail out and
4879 // delegate the work to the array set slow path.
4880 //
4881 // TODO: Extend the register allocator to support a new
4882 // "(locally) live temp" location so as to avoid always
4883 // going into the slow path when read barriers are enabled.
4884 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004885 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004886 // /* HeapReference<Class> */ temp = array->klass_
4887 __ movl(temp, Address(array, class_offset));
4888 codegen_->MaybeRecordImplicitNullCheck(instruction);
4889 __ MaybeUnpoisonHeapReference(temp);
4890
4891 // /* HeapReference<Class> */ temp = temp->component_type_
4892 __ movl(temp, Address(temp, component_offset));
4893 // If heap poisoning is enabled, no need to unpoison `temp`
4894        // nor the object reference in `register_value->klass_`, as
4895 // we are comparing two poisoned references.
4896 __ cmpl(temp, Address(register_value, class_offset));
4897
4898 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4899 __ j(kEqual, &do_put);
4900 // If heap poisoning is enabled, the `temp` reference has
4901 // not been unpoisoned yet; unpoison it now.
4902 __ MaybeUnpoisonHeapReference(temp);
4903
4904 // /* HeapReference<Class> */ temp = temp->super_class_
4905 __ movl(temp, Address(temp, super_offset));
4906 // If heap poisoning is enabled, no need to unpoison
4907 // `temp`, as we are comparing against null below.
4908 __ testl(temp, temp);
4909 __ j(kNotEqual, slow_path->GetEntryLabel());
4910 __ Bind(&do_put);
4911 } else {
4912 __ j(kNotEqual, slow_path->GetEntryLabel());
4913 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004914 }
4915 }
4916
4917 if (kPoisonHeapReferences) {
4918 __ movl(temp, register_value);
4919 __ PoisonHeapReference(temp);
4920 __ movl(address, temp);
4921 } else {
4922 __ movl(address, register_value);
4923 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004924 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004925 codegen_->MaybeRecordImplicitNullCheck(instruction);
4926 }
4927
4928 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4929 codegen_->MarkGCCard(
4930 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4931 __ Bind(&done);
4932
4933 if (slow_path != nullptr) {
4934 __ Bind(slow_path->GetExitLabel());
4935 }
4936
4937 break;
4938 }
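      // To summarize the fast path above (without read barriers): the array's component type
      // is compared against the stored value's class and, on a match, the store proceeds
      // directly. When the static type of the array is Object[], a mismatch is still accepted
      // if the component type's super class is null (i.e. the component type is
      // java.lang.Object); every other mismatch branches to ArraySetSlowPathX86_64 for the
      // full assignability check.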
Roland Levillain0d5a2812015-11-13 10:07:31 +00004939
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004940 case Primitive::kPrimInt: {
4941 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4942 Address address = index.IsConstant()
4943 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4944 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4945 if (value.IsRegister()) {
4946 __ movl(address, value.AsRegister<CpuRegister>());
4947 } else {
4948 DCHECK(value.IsConstant()) << value;
4949 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4950 __ movl(address, Immediate(v));
4951 }
4952 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004953 break;
4954 }
4955
4956 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004957 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4958 Address address = index.IsConstant()
4959 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4960 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4961 if (value.IsRegister()) {
4962 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004963 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004964 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004965 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004966 Address address_high = index.IsConstant()
4967 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4968 offset + sizeof(int32_t))
4969 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4970 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004971 }
4972 break;
4973 }
4974
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004975 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004976 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4977 Address address = index.IsConstant()
4978 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4979 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004980 if (value.IsFpuRegister()) {
4981 __ movss(address, value.AsFpuRegister<XmmRegister>());
4982 } else {
4983 DCHECK(value.IsConstant());
4984 int32_t v =
4985 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4986 __ movl(address, Immediate(v));
4987 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004988 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004989 break;
4990 }
4991
4992 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004993 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4994 Address address = index.IsConstant()
4995 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4996 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004997 if (value.IsFpuRegister()) {
4998 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4999 codegen_->MaybeRecordImplicitNullCheck(instruction);
5000 } else {
5001 int64_t v =
5002 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
5003 Address address_high = index.IsConstant()
5004 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
5005 offset + sizeof(int32_t))
5006 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
5007 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
5008 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005009 break;
5010 }
5011
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005012 case Primitive::kPrimVoid:
5013 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07005014 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005015 }
5016}
5017
5018void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005019 LocationSummary* locations =
5020 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005021 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005022 if (!instruction->IsEmittedAtUseSite()) {
5023 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5024 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005025}
5026
5027void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005028 if (instruction->IsEmittedAtUseSite()) {
5029 return;
5030 }
5031
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005032 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005033 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005034 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5035 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005036 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005037 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005038}
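// When the HArrayLength is emitted at its use site, no code is generated here; the user (a
// BoundsCheck, see below) addresses the length field directly, e.g.
//   cmpl [<array> + len_offset], <index>
// and records the implicit null check itself.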
5039
5040void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00005041 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
5042 ? LocationSummary::kCallOnSlowPath
5043 : LocationSummary::kNoCall;
5044 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005045 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005046 HInstruction* length = instruction->InputAt(1);
5047 if (!length->IsEmittedAtUseSite()) {
5048 locations->SetInAt(1, Location::RegisterOrConstant(length));
5049 }
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005050 if (instruction->HasUses()) {
5051 locations->SetOut(Location::SameAsFirstInput());
5052 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005053}
5054
5055void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5056 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005057 Location index_loc = locations->InAt(0);
5058 Location length_loc = locations->InAt(1);
Mark Mendellee8d9712016-07-12 11:13:15 -04005059 SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005060
Mark Mendell99dbd682015-04-22 16:18:52 -04005061 if (length_loc.IsConstant()) {
5062 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5063 if (index_loc.IsConstant()) {
5064      // BCE will remove the bounds check if we are guaranteed to pass.
5065 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5066 if (index < 0 || index >= length) {
5067 codegen_->AddSlowPath(slow_path);
5068 __ jmp(slow_path->GetEntryLabel());
5069 } else {
5070        // Some optimization after BCE may have generated this code, and we should not
5071        // emit a bounds check when the index is statically known to be in range.
5072 }
5073 return;
5074 }
5075
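    // Note: the comparisons below are unsigned (kAboveEqual / kBelowEqual), so a negative
    // index, reinterpreted as a large unsigned value, also ends up in the slow path.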
5076 // We have to reverse the jump condition because the length is the constant.
5077 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5078 __ cmpl(index_reg, Immediate(length));
5079 codegen_->AddSlowPath(slow_path);
5080 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005081 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005082 HInstruction* array_length = instruction->InputAt(1);
5083 if (array_length->IsEmittedAtUseSite()) {
5084 // Address the length field in the array.
5085 DCHECK(array_length->IsArrayLength());
5086 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
5087 Location array_loc = array_length->GetLocations()->InAt(0);
5088 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
5089 if (index_loc.IsConstant()) {
5090 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5091 __ cmpl(array_len, Immediate(value));
5092 } else {
5093 __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
5094 }
5095 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendell99dbd682015-04-22 16:18:52 -04005096 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005097 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5098 if (index_loc.IsConstant()) {
5099 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5100 __ cmpl(length, Immediate(value));
5101 } else {
5102 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5103 }
Mark Mendell99dbd682015-04-22 16:18:52 -04005104 }
5105 codegen_->AddSlowPath(slow_path);
5106 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005107 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005108}
5109
5110void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5111 CpuRegister card,
5112 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005113 CpuRegister value,
5114 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005115 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005116 if (value_can_be_null) {
5117 __ testl(value, value);
5118 __ j(kEqual, &is_null);
5119 }
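  // Dirty the card covering `object`:
  //   card <- the thread-local card table pointer (read via the GS segment)
  //   temp <- object >> kCardShift, the index of the object's card
  //   movb [card + temp], card<7:0>
  // Storing the low byte of `card` itself avoids materializing a separate constant; this
  // assumes the card table pointer is biased so that its low byte equals the dirty-card value.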
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005120 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5121 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005122 __ movq(temp, object);
5123 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005124 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005125 if (value_can_be_null) {
5126 __ Bind(&is_null);
5127 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005128}
5129
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005130void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005131 LOG(FATAL) << "Unimplemented";
5132}
5133
5134void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005135 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5136}
5137
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005138void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5139 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5140}
5141
5142void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005143 HBasicBlock* block = instruction->GetBlock();
5144 if (block->GetLoopInformation() != nullptr) {
5145 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5146 // The back edge will generate the suspend check.
5147 return;
5148 }
5149 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5150 // The goto will generate the suspend check.
5151 return;
5152 }
5153 GenerateSuspendCheck(instruction, nullptr);
5154}
5155
5156void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5157 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005158 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005159 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5160 if (slow_path == nullptr) {
5161 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5162 instruction->SetSlowPath(slow_path);
5163 codegen_->AddSlowPath(slow_path);
5164 if (successor != nullptr) {
5165 DCHECK(successor->IsLoopHeader());
5166 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5167 }
5168 } else {
5169 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5170 }
5171
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005172 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5173 /* no_rip */ true),
5174 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005175 if (successor == nullptr) {
5176 __ j(kNotEqual, slow_path->GetEntryLabel());
5177 __ Bind(slow_path->GetReturnLabel());
5178 } else {
5179 __ j(kEqual, codegen_->GetLabelOf(successor));
5180 __ jmp(slow_path->GetEntryLabel());
5181 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005182}
5183
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005184X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5185 return codegen_->GetAssembler();
5186}
5187
5188void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005189 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005190 Location source = move->GetSource();
5191 Location destination = move->GetDestination();
5192
5193 if (source.IsRegister()) {
5194 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005195 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005196 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005197 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005198 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005199 } else {
5200 DCHECK(destination.IsDoubleStackSlot());
5201 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005202 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005203 }
5204 } else if (source.IsStackSlot()) {
5205 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005206 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005207 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005208 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005209 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005210 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005211 } else {
5212 DCHECK(destination.IsStackSlot());
5213 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5214 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5215 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005216 } else if (source.IsDoubleStackSlot()) {
5217 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005218 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005219 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005220 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005221 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5222 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005223 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005224 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005225 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5226 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5227 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005228 } else if (source.IsConstant()) {
5229 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005230 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5231 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005232 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005233 if (value == 0) {
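          // xorl has a shorter encoding than `movl reg, Immediate(0)` and is a zeroing idiom
          // recognized by the hardware.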
5234 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5235 } else {
5236 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5237 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005238 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005239 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005240 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005241 }
5242 } else if (constant->IsLongConstant()) {
5243 int64_t value = constant->AsLongConstant()->GetValue();
5244 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005245 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005246 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005247 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005248 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005249 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005250 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005251 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005252 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005253 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005254 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005255 } else {
5256 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005257 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005258 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5259 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005260 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005261 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005262 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005263 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005264 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005265 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005266 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005267 } else {
5268 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005269 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005270 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005271 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005272 } else if (source.IsFpuRegister()) {
5273 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005274 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005275 } else if (destination.IsStackSlot()) {
5276 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005277 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005278 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005279 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005280 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005281 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005282 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005283 }
5284}
5285
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005286void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005287 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005288 __ movl(Address(CpuRegister(RSP), mem), reg);
5289 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005290}
5291
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005292void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005293 ScratchRegisterScope ensure_scratch(
5294 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5295
5296 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5297 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5298 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5299 Address(CpuRegister(RSP), mem2 + stack_offset));
5300 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5301 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5302 CpuRegister(ensure_scratch.GetRegister()));
5303}
5304
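// Illustration only, not ART code: x86-64 has no memory-to-memory move, so the
// stack-slot/stack-slot exchanges here need two temporaries. TMP acts as the first; the second
// comes from ScratchRegisterScope, and if that register had to be spilled with pushq, RSP moved
// down one word, which is why the operand offsets are compensated with kX86_64WordSize. A minimal
// C++ sketch of the resulting four-move sequence (the function name and parameters are
// illustrative):
inline void SwapStackSlots32Sketch(int32_t* mem1, int32_t* mem2) {
  int32_t tmp = *mem1;      // movl TMP, [RSP + mem1 + stack_offset]
  int32_t scratch = *mem2;  // movl scratch_reg, [RSP + mem2 + stack_offset]
  *mem2 = tmp;              // movl [RSP + mem2 + stack_offset], TMP
  *mem1 = scratch;          // movl [RSP + mem1 + stack_offset], scratch_reg
}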
Mark Mendell8a1c7282015-06-29 15:41:28 -04005305void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5306 __ movq(CpuRegister(TMP), reg1);
5307 __ movq(reg1, reg2);
5308 __ movq(reg2, CpuRegister(TMP));
5309}
5310
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005311void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5312 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5313 __ movq(Address(CpuRegister(RSP), mem), reg);
5314 __ movq(reg, CpuRegister(TMP));
5315}
5316
5317void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5318 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005319 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005320
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005321 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5322 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5323 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5324 Address(CpuRegister(RSP), mem2 + stack_offset));
5325 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5326 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5327 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005328}
5329
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005330void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5331 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5332 __ movss(Address(CpuRegister(RSP), mem), reg);
5333 __ movd(reg, CpuRegister(TMP));
5334}
5335
5336void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5337 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5338 __ movsd(Address(CpuRegister(RSP), mem), reg);
5339 __ movd(reg, CpuRegister(TMP));
5340}
5341
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005342void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005343 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005344 Location source = move->GetSource();
5345 Location destination = move->GetDestination();
5346
5347 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005348 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005349 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005350 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005351 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005352 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005353 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005354 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5355 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005356 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005357 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005358 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005359 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5360 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005361 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005362 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5363 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5364 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005365 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005366 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005367 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005368 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005369 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005370 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005371 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005372 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005373 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005374 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005375 }
5376}
5377
5378
5379void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5380 __ pushq(CpuRegister(reg));
5381}
5382
5383
5384void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5385 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005386}
5387
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005388void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005389 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005390 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5391 Immediate(mirror::Class::kStatusInitialized));
5392 __ j(kLess, slow_path->GetEntryLabel());
5393 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005394 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005395}
5396
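// Hedged sketch of the test emitted above, using a toy status type rather than the real
// mirror::Class layout (the enumerator values are illustrative). The generated code compares the
// class status word against kStatusInitialized and takes the slow path when it is smaller; as
// noted above, no memory fence is needed afterwards on x86-64.
enum class ToyClassStatusSketch : int32_t {
  kNotReady = 0,
  kVerified = 7,      // illustrative value
  kInitializing = 9,  // illustrative value
  kInitialized = 10,  // illustrative value
};

inline bool NeedsClinitSlowPathSketch(ToyClassStatusSketch status) {
  // cmpl [class_reg + StatusOffset()], Immediate(kStatusInitialized); j(kLess, slow_path)
  return static_cast<int32_t>(status) < static_cast<int32_t>(ToyClassStatusSketch::kInitialized);
}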
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005397HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5398 HLoadClass::LoadKind desired_class_load_kind) {
5399 if (kEmitCompilerReadBarrier) {
5400 switch (desired_class_load_kind) {
5401 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5402 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5403 case HLoadClass::LoadKind::kBootImageAddress:
5404 // TODO: Implement for read barrier.
5405 return HLoadClass::LoadKind::kDexCacheViaMethod;
5406 default:
5407 break;
5408 }
5409 }
5410 switch (desired_class_load_kind) {
5411 case HLoadClass::LoadKind::kReferrersClass:
5412 break;
5413 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5414 DCHECK(!GetCompilerOptions().GetCompilePic());
5415 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5416 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5417 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5418 DCHECK(GetCompilerOptions().GetCompilePic());
5419 break;
5420 case HLoadClass::LoadKind::kBootImageAddress:
5421 break;
5422 case HLoadClass::LoadKind::kDexCacheAddress:
5423 DCHECK(Runtime::Current()->UseJitCompilation());
5424 break;
5425 case HLoadClass::LoadKind::kDexCachePcRelative:
5426 DCHECK(!Runtime::Current()->UseJitCompilation());
5427 break;
5428 case HLoadClass::LoadKind::kDexCacheViaMethod:
5429 break;
5430 }
5431 return desired_class_load_kind;
5432}
5433
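// The switch above both validates and, in one case, rewrites the load kind requested by the
// instruction builder. Below is a hedged, simplified model of that policy as a pure function:
// the enumerators mirror HLoadClass::LoadKind, the boolean stands in for
// kEmitCompilerReadBarrier, the JIT-related DCHECKs are omitted, and the helper itself is
// illustrative, not part of the CodeGenerator API. GetSupportedLoadStringKind below follows the
// same shape.
enum class LoadClassKindSketch {
  kReferrersClass,
  kBootImageLinkTimeAddress,
  kBootImageLinkTimePcRelative,
  kBootImageAddress,
  kDexCacheAddress,
  kDexCachePcRelative,
  kDexCacheViaMethod,
};

inline LoadClassKindSketch SelectLoadClassKindSketch(LoadClassKindSketch desired,
                                                     bool emit_read_barrier) {
  if (emit_read_barrier &&
      (desired == LoadClassKindSketch::kBootImageLinkTimeAddress ||
       desired == LoadClassKindSketch::kBootImageLinkTimePcRelative ||
       desired == LoadClassKindSketch::kBootImageAddress)) {
    // Boot image kinds are not implemented for read barriers yet; fall back.
    return LoadClassKindSketch::kDexCacheViaMethod;
  }
  if (desired == LoadClassKindSketch::kBootImageLinkTimeAddress) {
    // x86-64 always prefers the RIP-relative form over an absolute link-time address.
    return LoadClassKindSketch::kBootImageLinkTimePcRelative;
  }
  return desired;
}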
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005434void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005435 if (cls->NeedsAccessCheck()) {
5436 InvokeRuntimeCallingConvention calling_convention;
5437 CodeGenerator::CreateLoadClassLocationSummary(
5438 cls,
5439 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5440 Location::RegisterLocation(RAX),
5441 /* code_generator_supports_read_barrier */ true);
5442 return;
5443 }
5444
5445 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
5446 ? LocationSummary::kCallOnSlowPath
5447 : LocationSummary::kNoCall;
5448 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
5449 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5450 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5451 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5452 locations->SetInAt(0, Location::RequiresRegister());
5453 }
5454 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005455}
5456
5457void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005458 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005459 if (cls->NeedsAccessCheck()) {
5460 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5461 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5462 cls,
5463 cls->GetDexPc(),
5464 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005465 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005466 return;
5467 }
5468
Roland Levillain0d5a2812015-11-13 10:07:31 +00005469 Location out_loc = locations->Out();
5470 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005471
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005472 bool generate_null_check = false;
5473 switch (cls->GetLoadKind()) {
5474 case HLoadClass::LoadKind::kReferrersClass: {
5475 DCHECK(!cls->CanCallRuntime());
5476 DCHECK(!cls->MustGenerateClinitCheck());
5477 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5478 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5479 GenerateGcRootFieldLoad(
5480 cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5481 break;
5482 }
5483 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5484 DCHECK(!kEmitCompilerReadBarrier);
5485 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5486 codegen_->RecordTypePatch(cls);
5487 break;
5488 case HLoadClass::LoadKind::kBootImageAddress: {
5489 DCHECK(!kEmitCompilerReadBarrier);
5490 DCHECK_NE(cls->GetAddress(), 0u);
5491 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
5492 __ movl(out, Immediate(address)); // Zero-extended.
5493 codegen_->RecordSimplePatch();
5494 break;
5495 }
5496 case HLoadClass::LoadKind::kDexCacheAddress: {
5497 DCHECK_NE(cls->GetAddress(), 0u);
5498 // /* GcRoot<mirror::Class> */ out = *address
5499 if (IsUint<32>(cls->GetAddress())) {
5500 Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
5501 GenerateGcRootFieldLoad(cls, out_loc, address);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005502 } else {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005503 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5504 __ movq(out, Immediate(cls->GetAddress()));
5505 GenerateGcRootFieldLoad(cls, out_loc, Address(out, 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005506 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005507 generate_null_check = !cls->IsInDexCache();
5508 break;
5509 }
5510 case HLoadClass::LoadKind::kDexCachePcRelative: {
5511 uint32_t offset = cls->GetDexCacheElementOffset();
5512 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
5513 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5514 /* no_rip */ false);
5515 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
5516 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label);
5517 generate_null_check = !cls->IsInDexCache();
5518 break;
5519 }
5520 case HLoadClass::LoadKind::kDexCacheViaMethod: {
5521 // /* GcRoot<mirror::Class>[] */ out =
5522 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5523 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5524 __ movq(out,
5525 Address(current_method,
5526 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
5527 // /* GcRoot<mirror::Class> */ out = out[type_index]
5528 GenerateGcRootFieldLoad(
5529 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
5530 generate_null_check = !cls->IsInDexCache();
5531 break;
5532 }
5533 default:
5534 LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
5535 UNREACHABLE();
5536 }
5537
5538 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5539 DCHECK(cls->CanCallRuntime());
5540 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5541 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5542 codegen_->AddSlowPath(slow_path);
5543 if (generate_null_check) {
5544 __ testl(out, out);
5545 __ j(kEqual, slow_path->GetEntryLabel());
5546 }
5547 if (cls->MustGenerateClinitCheck()) {
5548 GenerateClassInitializationCheck(slow_path, out);
5549 } else {
5550 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005551 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005552 }
5553}
5554
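// Hedged summary of the tail of the code above as plain control flow: a single
// LoadClassSlowPathX86_64 serves both the "class not yet resolved" case (out == null) and the
// "class needs initialization" case. The booleans stand in for the HLoadClass flags; the helper
// is illustrative only.
inline bool LoadClassTakesSlowPathSketch(bool generate_null_check,
                                         bool must_generate_clinit_check,
                                         bool out_is_null,
                                         bool class_is_initialized) {
  if (generate_null_check && out_is_null) {
    return true;  // testl out, out ; j(kEqual, slow_path->GetEntryLabel())
  }
  if (must_generate_clinit_check && !class_is_initialized) {
    return true;  // GenerateClassInitializationCheck(slow_path, out)
  }
  return false;
}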
5555void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5556 LocationSummary* locations =
5557 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5558 locations->SetInAt(0, Location::RequiresRegister());
5559 if (check->HasUses()) {
5560 locations->SetOut(Location::SameAsFirstInput());
5561 }
5562}
5563
5564void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005565 // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005566 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005567 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005568 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005569 GenerateClassInitializationCheck(slow_path,
5570 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005571}
5572
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005573HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5574 HLoadString::LoadKind desired_string_load_kind) {
5575 if (kEmitCompilerReadBarrier) {
5576 switch (desired_string_load_kind) {
5577 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5578 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5579 case HLoadString::LoadKind::kBootImageAddress:
5580 // TODO: Implement for read barrier.
5581 return HLoadString::LoadKind::kDexCacheViaMethod;
5582 default:
5583 break;
5584 }
5585 }
5586 switch (desired_string_load_kind) {
5587 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5588 DCHECK(!GetCompilerOptions().GetCompilePic());
5589 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5590 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5591 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5592 DCHECK(GetCompilerOptions().GetCompilePic());
5593 break;
5594 case HLoadString::LoadKind::kBootImageAddress:
5595 break;
5596 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005597 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005598 break;
5599 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005600 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005601 break;
5602 case HLoadString::LoadKind::kDexCacheViaMethod:
5603 break;
5604 }
5605 return desired_string_load_kind;
5606}
5607
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005608void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005609 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005610 ? LocationSummary::kCallOnSlowPath
5611 : LocationSummary::kNoCall;
5612 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005613 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5614 locations->SetInAt(0, Location::RequiresRegister());
5615 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005616 locations->SetOut(Location::RequiresRegister());
5617}
5618
5619void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005620 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005621 Location out_loc = locations->Out();
5622 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005623
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005624 switch (load->GetLoadKind()) {
5625 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
5626 DCHECK(!kEmitCompilerReadBarrier);
5627 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5628 codegen_->RecordStringPatch(load);
5629 return; // No dex cache slow path.
5630 }
5631 case HLoadString::LoadKind::kBootImageAddress: {
5632 DCHECK(!kEmitCompilerReadBarrier);
5633 DCHECK_NE(load->GetAddress(), 0u);
5634 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5635 __ movl(out, Immediate(address)); // Zero-extended.
5636 codegen_->RecordSimplePatch();
5637 return; // No dex cache slow path.
5638 }
5639 case HLoadString::LoadKind::kDexCacheAddress: {
5640 DCHECK_NE(load->GetAddress(), 0u);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005641 // /* GcRoot<mirror::String> */ out = *address
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005642 if (IsUint<32>(load->GetAddress())) {
5643 Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
5644 GenerateGcRootFieldLoad(load, out_loc, address);
5645 } else {
5646 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5647 __ movq(out, Immediate(load->GetAddress()));
5648 GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
5649 }
5650 break;
5651 }
5652 case HLoadString::LoadKind::kDexCachePcRelative: {
5653 uint32_t offset = load->GetDexCacheElementOffset();
5654 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
5655 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5656 /* no_rip */ false);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005657 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005658 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
5659 break;
5660 }
5661 case HLoadString::LoadKind::kDexCacheViaMethod: {
5662 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5663
5664 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5665 GenerateGcRootFieldLoad(
5666 load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5667 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5668 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
5669 // /* GcRoot<mirror::String> */ out = out[string_index]
5670 GenerateGcRootFieldLoad(
5671 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
5672 break;
5673 }
5674 default:
5675 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
5676 UNREACHABLE();
5677 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005678
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005679 if (!load->IsInDexCache()) {
5680 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5681 codegen_->AddSlowPath(slow_path);
5682 __ testl(out, out);
5683 __ j(kEqual, slow_path->GetEntryLabel());
5684 __ Bind(slow_path->GetExitLabel());
5685 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005686}
5687
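// Hedged sketch of the three-load chain emitted for kDexCacheViaMethod above, written against toy
// structs (the field and type names are illustrative, not the real mirror:: layouts). Each load
// in the real code goes through GenerateGcRootFieldLoad so a read barrier can be inserted when
// enabled.
struct ToyStringSketch {};
struct ToyDeclaringClassSketch {
  ToyStringSketch** dex_cache_strings;  // GcRoot<mirror::String>[]
};
struct ToyArtMethodSketch {
  ToyDeclaringClassSketch* declaring_class;
};

inline ToyStringSketch* LoadStringViaMethodSketch(ToyArtMethodSketch* current_method,
                                                  uint32_t string_index) {
  ToyDeclaringClassSketch* klass = current_method->declaring_class;  // 1st GC-root load
  ToyStringSketch** strings = klass->dex_cache_strings;              // 2nd load
  return strings[string_index];                                      // 3rd GC-root load
}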
David Brazdilcb1c0552015-08-04 16:22:25 +01005688static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005689 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5690 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005691}
5692
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005693void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5694 LocationSummary* locations =
5695 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5696 locations->SetOut(Location::RequiresRegister());
5697}
5698
5699void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005700 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5701}
5702
5703void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5704 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5705}
5706
5707void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5708 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005709}
5710
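// Illustration only: the two visitors above read and clear a 32-bit compressed reference stored
// at a fixed offset from the Thread object, which the generated code reaches through the GS
// segment (hence the gs()-prefixed absolute address). Modeled here with a plain struct; the field
// name is hypothetical.
struct ToyThreadTlsSketch {
  uint32_t exception;  // compressed mirror::Throwable reference
};

inline uint32_t LoadExceptionSketch(const ToyThreadTlsSketch* self) {
  return self->exception;  // gs: movl out, [Thread::ExceptionOffset()]
}

inline void ClearExceptionSketch(ToyThreadTlsSketch* self) {
  self->exception = 0u;    // gs: movl [Thread::ExceptionOffset()], Immediate(0)
}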
5711void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5712 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005713 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005714 InvokeRuntimeCallingConvention calling_convention;
5715 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5716}
5717
5718void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005719 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5720 instruction,
5721 instruction->GetDexPc(),
5722 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005723 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005724}
5725
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005726static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5727 return kEmitCompilerReadBarrier &&
5728 (kUseBakerReadBarrier ||
5729 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5730 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5731 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5732}
5733
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005734void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005735 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005736 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5737 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005738 case TypeCheckKind::kExactCheck:
5739 case TypeCheckKind::kAbstractClassCheck:
5740 case TypeCheckKind::kClassHierarchyCheck:
5741 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005742 call_kind =
5743 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005744 break;
5745 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005746 case TypeCheckKind::kUnresolvedCheck:
5747 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005748 call_kind = LocationSummary::kCallOnSlowPath;
5749 break;
5750 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005751
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005752 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005753 locations->SetInAt(0, Location::RequiresRegister());
5754 locations->SetInAt(1, Location::Any());
5755 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5756 locations->SetOut(Location::RequiresRegister());
5757 // When read barriers are enabled, we need a temporary register for
5758 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005759 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005760 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005761 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005762}
5763
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005764void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005765 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005766 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005767 Location obj_loc = locations->InAt(0);
5768 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005769 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005770 Location out_loc = locations->Out();
5771 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005772 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005773 locations->GetTemp(0) :
5774 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005775 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005776 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5777 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5778 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005779 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005780 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005781
5782 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005783 // Avoid null check if we know obj is not null.
5784 if (instruction->MustDoNullCheck()) {
5785 __ testl(obj, obj);
5786 __ j(kEqual, &zero);
5787 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005788
Roland Levillain0d5a2812015-11-13 10:07:31 +00005789 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005790 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005791
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005792 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005793 case TypeCheckKind::kExactCheck: {
5794 if (cls.IsRegister()) {
5795 __ cmpl(out, cls.AsRegister<CpuRegister>());
5796 } else {
5797 DCHECK(cls.IsStackSlot()) << cls;
5798 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5799 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005800 if (zero.IsLinked()) {
5801 // Classes must be equal for the instanceof to succeed.
5802 __ j(kNotEqual, &zero);
5803 __ movl(out, Immediate(1));
5804 __ jmp(&done);
5805 } else {
5806 __ setcc(kEqual, out);
5807 // setcc only sets the low byte.
5808 __ andl(out, Immediate(1));
5809 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005810 break;
5811 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005812
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005813 case TypeCheckKind::kAbstractClassCheck: {
5814 // If the class is abstract, we eagerly fetch the super class of the
5815 // object to avoid doing a comparison we know will fail.
5816 NearLabel loop, success;
5817 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005818 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005819 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005820 __ testl(out, out);
5821 // If `out` is null, we use it for the result, and jump to `done`.
5822 __ j(kEqual, &done);
5823 if (cls.IsRegister()) {
5824 __ cmpl(out, cls.AsRegister<CpuRegister>());
5825 } else {
5826 DCHECK(cls.IsStackSlot()) << cls;
5827 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5828 }
5829 __ j(kNotEqual, &loop);
5830 __ movl(out, Immediate(1));
5831 if (zero.IsLinked()) {
5832 __ jmp(&done);
5833 }
5834 break;
5835 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005836
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005837 case TypeCheckKind::kClassHierarchyCheck: {
5838 // Walk over the class hierarchy to find a match.
5839 NearLabel loop, success;
5840 __ Bind(&loop);
5841 if (cls.IsRegister()) {
5842 __ cmpl(out, cls.AsRegister<CpuRegister>());
5843 } else {
5844 DCHECK(cls.IsStackSlot()) << cls;
5845 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5846 }
5847 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005848 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005849 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005850 __ testl(out, out);
5851 __ j(kNotEqual, &loop);
5852 // If `out` is null, we use it for the result, and jump to `done`.
5853 __ jmp(&done);
5854 __ Bind(&success);
5855 __ movl(out, Immediate(1));
5856 if (zero.IsLinked()) {
5857 __ jmp(&done);
5858 }
5859 break;
5860 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005861
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005862 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005863 // Do an exact check.
5864 NearLabel exact_check;
5865 if (cls.IsRegister()) {
5866 __ cmpl(out, cls.AsRegister<CpuRegister>());
5867 } else {
5868 DCHECK(cls.IsStackSlot()) << cls;
5869 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5870 }
5871 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005872 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005873 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005874 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005875 __ testl(out, out);
5876 // If `out` is null, we use it for the result, and jump to `done`.
5877 __ j(kEqual, &done);
5878 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5879 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005880 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005881 __ movl(out, Immediate(1));
5882 __ jmp(&done);
5883 break;
5884 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005885
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005886 case TypeCheckKind::kArrayCheck: {
5887 if (cls.IsRegister()) {
5888 __ cmpl(out, cls.AsRegister<CpuRegister>());
5889 } else {
5890 DCHECK(cls.IsStackSlot()) << cls;
5891 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5892 }
5893 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005894 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5895 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005896 codegen_->AddSlowPath(slow_path);
5897 __ j(kNotEqual, slow_path->GetEntryLabel());
5898 __ movl(out, Immediate(1));
5899 if (zero.IsLinked()) {
5900 __ jmp(&done);
5901 }
5902 break;
5903 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005904
Calin Juravle98893e12015-10-02 21:05:03 +01005905 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005906 case TypeCheckKind::kInterfaceCheck: {
5907 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005908 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005909 // cases.
5910 //
5911 // We cannot directly call the InstanceofNonTrivial runtime
5912 // entry point without resorting to a type checking slow path
5913 // here (i.e. by calling InvokeRuntime directly), as it would
 5914 // require assigning fixed registers for the inputs of this
5915 // HInstanceOf instruction (following the runtime calling
5916 // convention), which might be cluttered by the potential first
5917 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005918 //
5919 // TODO: Introduce a new runtime entry point taking the object
5920 // to test (instead of its class) as argument, and let it deal
5921 // with the read barrier issues. This will let us refactor this
5922 // case of the `switch` code as it was previously (with a direct
5923 // call to the runtime not using a type checking slow path).
5924 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005925 DCHECK(locations->OnlyCallsOnSlowPath());
5926 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5927 /* is_fatal */ false);
5928 codegen_->AddSlowPath(slow_path);
5929 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005930 if (zero.IsLinked()) {
5931 __ jmp(&done);
5932 }
5933 break;
5934 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005935 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005936
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005937 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005938 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005939 __ xorl(out, out);
5940 }
5941
5942 if (done.IsLinked()) {
5943 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005944 }
5945
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005946 if (slow_path != nullptr) {
5947 __ Bind(slow_path->GetExitLabel());
5948 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005949}
5950
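// Hedged reference semantics for the inlined instanceof strategies above, written against a toy
// class model (ToyKlassSketch is illustrative, not mirror::Class). A null reference always yields
// false and, when a null check is needed at all, is handled before these checks run.
struct ToyKlassSketch {
  const ToyKlassSketch* super_class;
  const ToyKlassSketch* component_type;  // non-null only for array classes
  bool is_primitive;
};

// kExactCheck: a single class comparison.
inline bool InstanceOfExactSketch(const ToyKlassSketch* klass, const ToyKlassSketch* cls) {
  return klass == cls;
}

// kAbstractClassCheck and kClassHierarchyCheck: walk the super-class chain. The abstract variant
// starts the walk at the super class because comparing the object's own class against an abstract
// class can never succeed; the result is the same.
inline bool InstanceOfHierarchySketch(const ToyKlassSketch* klass, const ToyKlassSketch* cls) {
  for (const ToyKlassSketch* k = klass; k != nullptr; k = k->super_class) {
    if (k == cls) {
      return true;
    }
  }
  return false;
}

// kArrayObjectCheck: exact match, or "the object is an array of references".
inline bool InstanceOfArrayObjectSketch(const ToyKlassSketch* klass, const ToyKlassSketch* cls) {
  if (klass == cls) {
    return true;
  }
  return klass->component_type != nullptr && !klass->component_type->is_primitive;
}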
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005951void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005952 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5953 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005954 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5955 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005956 case TypeCheckKind::kExactCheck:
5957 case TypeCheckKind::kAbstractClassCheck:
5958 case TypeCheckKind::kClassHierarchyCheck:
5959 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005960 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5961 LocationSummary::kCallOnSlowPath :
5962 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005963 break;
5964 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005965 case TypeCheckKind::kUnresolvedCheck:
5966 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005967 call_kind = LocationSummary::kCallOnSlowPath;
5968 break;
5969 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005970 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5971 locations->SetInAt(0, Location::RequiresRegister());
5972 locations->SetInAt(1, Location::Any());
5973 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5974 locations->AddTemp(Location::RequiresRegister());
5975 // When read barriers are enabled, we need an additional temporary
5976 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005977 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005978 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005979 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005980}
5981
5982void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005983 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005984 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005985 Location obj_loc = locations->InAt(0);
5986 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005987 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005988 Location temp_loc = locations->GetTemp(0);
5989 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005990 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005991 locations->GetTemp(1) :
5992 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005993 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5994 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5995 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5996 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005997
Roland Levillain0d5a2812015-11-13 10:07:31 +00005998 bool is_type_check_slow_path_fatal =
5999 (type_check_kind == TypeCheckKind::kExactCheck ||
6000 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
6001 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
6002 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
6003 !instruction->CanThrowIntoCatchBlock();
6004 SlowPathCode* type_check_slow_path =
6005 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
6006 is_type_check_slow_path_fatal);
6007 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006008
Roland Levillain0d5a2812015-11-13 10:07:31 +00006009 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006010 case TypeCheckKind::kExactCheck:
6011 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006012 NearLabel done;
6013 // Avoid null check if we know obj is not null.
6014 if (instruction->MustDoNullCheck()) {
6015 __ testl(obj, obj);
6016 __ j(kEqual, &done);
6017 }
6018
6019 // /* HeapReference<Class> */ temp = obj->klass_
6020 GenerateReferenceLoadTwoRegisters(
6021 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6022
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006023 if (cls.IsRegister()) {
6024 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6025 } else {
6026 DCHECK(cls.IsStackSlot()) << cls;
6027 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6028 }
6029 // Jump to slow path for throwing the exception or doing a
6030 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006031 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006032 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006033 break;
6034 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006035
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006036 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006037 NearLabel done;
6038 // Avoid null check if we know obj is not null.
6039 if (instruction->MustDoNullCheck()) {
6040 __ testl(obj, obj);
6041 __ j(kEqual, &done);
6042 }
6043
6044 // /* HeapReference<Class> */ temp = obj->klass_
6045 GenerateReferenceLoadTwoRegisters(
6046 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6047
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006048 // If the class is abstract, we eagerly fetch the super class of the
6049 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006050 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006051 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006052 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006053 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006054
6055 // If the class reference currently in `temp` is not null, jump
6056 // to the `compare_classes` label to compare it with the checked
6057 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006058 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006059 __ j(kNotEqual, &compare_classes);
6060 // Otherwise, jump to the slow path to throw the exception.
6061 //
6062 // But before, move back the object's class into `temp` before
6063 // going into the slow path, as it has been overwritten in the
6064 // meantime.
6065 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006066 GenerateReferenceLoadTwoRegisters(
6067 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006068 __ jmp(type_check_slow_path->GetEntryLabel());
6069
6070 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006071 if (cls.IsRegister()) {
6072 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6073 } else {
6074 DCHECK(cls.IsStackSlot()) << cls;
6075 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6076 }
6077 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00006078 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006079 break;
6080 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006081
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006082 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006083 NearLabel done;
6084 // Avoid null check if we know obj is not null.
6085 if (instruction->MustDoNullCheck()) {
6086 __ testl(obj, obj);
6087 __ j(kEqual, &done);
6088 }
6089
6090 // /* HeapReference<Class> */ temp = obj->klass_
6091 GenerateReferenceLoadTwoRegisters(
6092 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6093
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006094 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006095 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006096 __ Bind(&loop);
6097 if (cls.IsRegister()) {
6098 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6099 } else {
6100 DCHECK(cls.IsStackSlot()) << cls;
6101 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6102 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006103 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006104
Roland Levillain0d5a2812015-11-13 10:07:31 +00006105 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006106 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006107
6108 // If the class reference currently in `temp` is not null, jump
 6109 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006110 __ testl(temp, temp);
6111 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006112 // Otherwise, jump to the slow path to throw the exception.
6113 //
6114 // But before, move back the object's class into `temp` before
6115 // going into the slow path, as it has been overwritten in the
6116 // meantime.
6117 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006118 GenerateReferenceLoadTwoRegisters(
6119 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006120 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006121 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006122 break;
6123 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006124
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006125 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006126 // We cannot use a NearLabel here, as its range might be too
6127 // short in some cases when read barriers are enabled. This has
6128 // been observed for instance when the code emitted for this
6129 // case uses high x86-64 registers (R8-R15).
6130 Label done;
6131 // Avoid null check if we know obj is not null.
6132 if (instruction->MustDoNullCheck()) {
6133 __ testl(obj, obj);
6134 __ j(kEqual, &done);
6135 }
6136
6137 // /* HeapReference<Class> */ temp = obj->klass_
6138 GenerateReferenceLoadTwoRegisters(
6139 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6140
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006141 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006142 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006143 if (cls.IsRegister()) {
6144 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6145 } else {
6146 DCHECK(cls.IsStackSlot()) << cls;
6147 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6148 }
6149 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006150
6151 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006152 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006153 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006154
6155 // If the component type is not null (i.e. the object is indeed
6156 // an array), jump to label `check_non_primitive_component_type`
6157 // to further check that this component type is not a primitive
6158 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006159 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006160 __ j(kNotEqual, &check_non_primitive_component_type);
6161 // Otherwise, jump to the slow path to throw the exception.
6162 //
6163 // But before, move back the object's class into `temp` before
6164 // going into the slow path, as it has been overwritten in the
6165 // meantime.
6166 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006167 GenerateReferenceLoadTwoRegisters(
6168 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006169 __ jmp(type_check_slow_path->GetEntryLabel());
6170
6171 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006172 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00006173 __ j(kEqual, &done);
6174 // Same comment as above regarding `temp` and the slow path.
6175 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006176 GenerateReferenceLoadTwoRegisters(
6177 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006178 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006179 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006180 break;
6181 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006182
Calin Juravle98893e12015-10-02 21:05:03 +01006183 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006184 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00006185 NearLabel done;
6186 // Avoid null check if we know obj is not null.
6187 if (instruction->MustDoNullCheck()) {
6188 __ testl(obj, obj);
6189 __ j(kEqual, &done);
6190 }
6191
6192 // /* HeapReference<Class> */ temp = obj->klass_
6193 GenerateReferenceLoadTwoRegisters(
6194 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6195
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006196 // We always go into the type check slow path for the unresolved
6197 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006198 //
6199 // We cannot directly call the CheckCast runtime entry point
6200 // without resorting to a type checking slow path here (i.e. by
6201 // calling InvokeRuntime directly), as it would require to
6202 // assign fixed registers for the inputs of this HInstanceOf
6203 // instruction (following the runtime calling convention), which
6204 // might be cluttered by the potential first read barrier
6205 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006206 //
6207 // TODO: Introduce a new runtime entry point taking the object
6208 // to test (instead of its class) as argument, and let it deal
6209 // with the read barrier issues. This will let us refactor this
6210 // case of the `switch` code as it was previously (with a direct
6211 // call to the runtime not using a type checking slow path).
6212 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006213 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006214 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006215 break;
6216 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006217
Roland Levillain0d5a2812015-11-13 10:07:31 +00006218 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006219}
6220
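// Hedged counterpart of the instanceof sketch above for HCheckCast: a null reference always
// passes, and any inlined test that fails transfers to TypeCheckSlowPathX86_64, which either
// throws or performs the more involved (array, interface, or unresolved) check in the runtime.
// Toy types and names are illustrative only.
struct ToyCheckCastKlassSketch {
  const ToyCheckCastKlassSketch* super_class;
};

// Returns true when the fast path proves the cast; false means "go to the slow path", not
// necessarily that the cast is invalid.
inline bool CheckCastFastPathSketch(const ToyCheckCastKlassSketch* obj_klass_or_null,
                                    const ToyCheckCastKlassSketch* cls) {
  if (obj_klass_or_null == nullptr) {
    return true;  // checkcast always accepts null
  }
  for (const ToyCheckCastKlassSketch* k = obj_klass_or_null; k != nullptr; k = k->super_class) {
    if (k == cls) {
      return true;
    }
  }
  return false;  // jmp type_check_slow_path->GetEntryLabel()
}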
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006221void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6222 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006223 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006224 InvokeRuntimeCallingConvention calling_convention;
6225 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6226}
6227
6228void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01006229 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
6230 : QUICK_ENTRY_POINT(pUnlockObject),
6231 instruction,
6232 instruction->GetDexPc(),
6233 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006234 if (instruction->IsEnter()) {
6235 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6236 } else {
6237 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6238 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006239}
6240
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006241void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6242void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6243void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6244
6245void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6246 LocationSummary* locations =
6247 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6248 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6249 || instruction->GetResultType() == Primitive::kPrimLong);
6250 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006251 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006252 locations->SetOut(Location::SameAsFirstInput());
6253}
6254
6255void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6256 HandleBitwiseOperation(instruction);
6257}
6258
6259void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6260 HandleBitwiseOperation(instruction);
6261}
6262
6263void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6264 HandleBitwiseOperation(instruction);
6265}
6266
6267void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6268 LocationSummary* locations = instruction->GetLocations();
6269 Location first = locations->InAt(0);
6270 Location second = locations->InAt(1);
6271 DCHECK(first.Equals(locations->Out()));
6272
6273 if (instruction->GetResultType() == Primitive::kPrimInt) {
6274 if (second.IsRegister()) {
6275 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006276 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006277 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006278 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006279 } else {
6280 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006281 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006282 }
6283 } else if (second.IsConstant()) {
6284 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6285 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006286 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006287 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006288 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006289 } else {
6290 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006291 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006292 }
6293 } else {
6294 Address address(CpuRegister(RSP), second.GetStackIndex());
6295 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006296 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006297 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006298 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006299 } else {
6300 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006301 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006302 }
6303 }
6304 } else {
6305 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006306 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6307 bool second_is_constant = false;
6308 int64_t value = 0;
6309 if (second.IsConstant()) {
6310 second_is_constant = true;
6311 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006312 }
Mark Mendell40741f32015-04-20 22:10:34 -04006313 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006314
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006315 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006316 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006317 if (is_int32_value) {
6318 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6319 } else {
6320 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6321 }
6322 } else if (second.IsDoubleStackSlot()) {
6323 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006324 } else {
6325 __ andq(first_reg, second.AsRegister<CpuRegister>());
6326 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006327 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006328 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006329 if (is_int32_value) {
6330 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6331 } else {
6332 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6333 }
6334 } else if (second.IsDoubleStackSlot()) {
6335 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006336 } else {
6337 __ orq(first_reg, second.AsRegister<CpuRegister>());
6338 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006339 } else {
6340 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006341 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006342 if (is_int32_value) {
6343 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6344 } else {
6345 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6346 }
6347 } else if (second.IsDoubleStackSlot()) {
6348 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006349 } else {
6350 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6351 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006352 }
6353 }
6354}
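
// Illustrative note (an example, not part of the original comments): on the
// 64-bit path above, a constant that fits in a sign-extended 32-bit immediate
// is encoded directly, e.g. `x & 0xff` becomes roughly
//
//   andq rdi, 0xff
//
// while a constant such as 0x123456789 does not fit and is read from the
// RIP-relative constant area instead:
//
//   andq rdi, qword ptr [rip + <int64 literal>]
//
// The register name rdi is only an example; the actual register comes from
// the register allocator.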
6355
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006356void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6357 Location out,
6358 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006359 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006360 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6361 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006362 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006363 if (kUseBakerReadBarrier) {
6364 // Load with fast path based Baker's read barrier.
6365 // /* HeapReference<Object> */ out = *(out + offset)
6366 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006367 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006368 } else {
6369 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006370 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006371 // in the following move operation, as we will need it for the
6372 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006373 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006374 // /* HeapReference<Object> */ out = *(out + offset)
6375 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006376 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006377 }
6378 } else {
6379 // Plain load with no read barrier.
6380 // /* HeapReference<Object> */ out = *(out + offset)
6381 __ movl(out_reg, Address(out_reg, offset));
6382 __ MaybeUnpoisonHeapReference(out_reg);
6383 }
6384}
6385
6386void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6387 Location out,
6388 Location obj,
6389 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006390 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006391 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6392 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6393 if (kEmitCompilerReadBarrier) {
6394 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006395 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006396 // Load with fast path based Baker's read barrier.
6397 // /* HeapReference<Object> */ out = *(obj + offset)
6398 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006399 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006400 } else {
6401 // Load with slow path based read barrier.
6402 // /* HeapReference<Object> */ out = *(obj + offset)
6403 __ movl(out_reg, Address(obj_reg, offset));
6404 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6405 }
6406 } else {
6407 // Plain load with no read barrier.
6408 // /* HeapReference<Object> */ out = *(obj + offset)
6409 __ movl(out_reg, Address(obj_reg, offset));
6410 __ MaybeUnpoisonHeapReference(out_reg);
6411 }
6412}
6413
6414void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6415 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006416 const Address& address,
6417 Label* fixup_label) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006418 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6419 if (kEmitCompilerReadBarrier) {
6420 if (kUseBakerReadBarrier) {
6421 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6422 // Baker's read barriers are used:
6423 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006424 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006425 // if (Thread::Current()->GetIsGcMarking()) {
6426 // root = ReadBarrier::Mark(root)
6427 // }
6428
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006429 // /* GcRoot<mirror::Object> */ root = *address
6430 __ movl(root_reg, address);
6431 if (fixup_label != nullptr) {
6432 __ Bind(fixup_label);
6433 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006434 static_assert(
6435 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6436 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6437 "have different sizes.");
6438 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6439 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6440 "have different sizes.");
6441
6442 // Slow path used to mark the GC root `root`.
6443 SlowPathCode* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01006444 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006445 codegen_->AddSlowPath(slow_path);
6446
6447 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6448 /* no_rip */ true),
6449 Immediate(0));
6450 __ j(kNotEqual, slow_path->GetEntryLabel());
6451 __ Bind(slow_path->GetExitLabel());
6452 } else {
6453 // GC root loaded through a slow path for read barriers other
6454 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006455 // /* GcRoot<mirror::Object>* */ root = address
6456 __ leaq(root_reg, address);
6457 if (fixup_label != nullptr) {
6458 __ Bind(fixup_label);
6459 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006460 // /* mirror::Object* */ root = root->Read()
6461 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6462 }
6463 } else {
6464 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006465 // /* GcRoot<mirror::Object> */ root = *address
6466 __ movl(root_reg, address);
6467 if (fixup_label != nullptr) {
6468 __ Bind(fixup_label);
6469 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006470 // Note that GC roots are not affected by heap poisoning, thus we
6471 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006472 }
6473}
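
// Rough sketch of the Baker fast path emitted by GenerateGcRootFieldLoad above
// (register names are illustrative only):
//
//   movl eax, [address]                              // root = *address
//   cmpl gs:[<Thread::IsGcMarkingOffset>], 0
//   jne  <ReadBarrierMarkSlowPathX86_64 entry>       // root = ReadBarrier::Mark(root)
//  exit:
//
// The slow path jumps back to `exit`, so the common (not marking) case costs
// one thread-local compare plus an untaken branch.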
6474
6475void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6476 Location ref,
6477 CpuRegister obj,
6478 uint32_t offset,
6479 Location temp,
6480 bool needs_null_check) {
6481 DCHECK(kEmitCompilerReadBarrier);
6482 DCHECK(kUseBakerReadBarrier);
6483
6484 // /* HeapReference<Object> */ ref = *(obj + offset)
6485 Address src(obj, offset);
6486 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6487}
6488
6489void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6490 Location ref,
6491 CpuRegister obj,
6492 uint32_t data_offset,
6493 Location index,
6494 Location temp,
6495 bool needs_null_check) {
6496 DCHECK(kEmitCompilerReadBarrier);
6497 DCHECK(kUseBakerReadBarrier);
6498
Roland Levillain3d312422016-06-23 13:53:42 +01006499 static_assert(
6500 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6501 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006502 // /* HeapReference<Object> */ ref =
6503 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6504 Address src = index.IsConstant() ?
6505 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6506 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6507 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6508}
6509
6510void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6511 Location ref,
6512 CpuRegister obj,
6513 const Address& src,
6514 Location temp,
6515 bool needs_null_check) {
6516 DCHECK(kEmitCompilerReadBarrier);
6517 DCHECK(kUseBakerReadBarrier);
6518
6519 // In slow path based read barriers, the read barrier call is
6520 // inserted after the original load. However, in fast path based
6521 // Baker's read barriers, we need to perform the load of
6522 // mirror::Object::monitor_ *before* the original reference load.
6523 // This load-load ordering is required by the read barrier.
6524 // The fast path/slow path (for Baker's algorithm) should look like:
6525 //
6526 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6527 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6528 // HeapReference<Object> ref = *src; // Original reference load.
6529 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6530 // if (is_gray) {
6531 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6532 // }
6533 //
6534 // Note: the original implementation in ReadBarrier::Barrier is
6535 // slightly more complex as:
6536 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006537 // the high-bits of rb_state, which are expected to be all zeroes
6538 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6539 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006540 // - it performs additional checks that we do not do here for
6541 // performance reasons.
6542
6543 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6544 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6545 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6546
6547 // /* int32_t */ monitor = obj->monitor_
6548 __ movl(temp_reg, Address(obj, monitor_offset));
6549 if (needs_null_check) {
6550 MaybeRecordImplicitNullCheck(instruction);
6551 }
6552 // /* LockWord */ lock_word = LockWord(monitor)
6553 static_assert(sizeof(LockWord) == sizeof(int32_t),
6554 "art::LockWord and int32_t have different sizes.");
6555 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6556 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6557 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6558 static_assert(
6559 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6560 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6561
6562 // Load fence to prevent load-load reordering.
6563 // Note that this is a no-op, thanks to the x86-64 memory model.
6564 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6565
6566 // The actual reference load.
6567 // /* HeapReference<Object> */ ref = *src
6568 __ movl(ref_reg, src);
6569
6570 // Object* ref = ref_addr->AsMirrorPtr()
6571 __ MaybeUnpoisonHeapReference(ref_reg);
6572
6573 // Slow path used to mark the object `ref` when it is gray.
6574 SlowPathCode* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01006575 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006576 AddSlowPath(slow_path);
6577
6578 // if (rb_state == ReadBarrier::gray_ptr_)
6579 // ref = ReadBarrier::Mark(ref);
6580 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6581 __ j(kEqual, slow_path->GetEntryLabel());
6582 __ Bind(slow_path->GetExitLabel());
6583}
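
// For reference, the fast path above reduces to the following shape (an
// informal sketch; actual registers depend on the caller, and heap-reference
// unpoisoning plus the implicit null check are elided):
//
//   movl temp, [obj + monitor_offset]        // lock word
//   shrl temp, kReadBarrierStateShift
//   andl temp, kReadBarrierStateMask         // temp = rb_state
//   movl ref, [src]                          // original reference load
//   cmpl temp, <gray_ptr_>
//   je   <mark slow path>                    // ref = ReadBarrier::Mark(ref)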
6584
6585void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6586 Location out,
6587 Location ref,
6588 Location obj,
6589 uint32_t offset,
6590 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006591 DCHECK(kEmitCompilerReadBarrier);
6592
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006593 // Insert a slow path based read barrier *after* the reference load.
6594 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006595 // If heap poisoning is enabled, the unpoisoning of the loaded
6596 // reference will be carried out by the runtime within the slow
6597 // path.
6598 //
6599 // Note that `ref` currently does not get unpoisoned (when heap
6600 // poisoning is enabled), which is alright as the `ref` argument is
6601 // not used by the artReadBarrierSlow entry point.
6602 //
6603 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6604 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6605 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6606 AddSlowPath(slow_path);
6607
Roland Levillain0d5a2812015-11-13 10:07:31 +00006608 __ jmp(slow_path->GetEntryLabel());
6609 __ Bind(slow_path->GetExitLabel());
6610}
6611
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006612void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6613 Location out,
6614 Location ref,
6615 Location obj,
6616 uint32_t offset,
6617 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006618 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006619 // Baker's read barriers shall be handled by the fast path
6620 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6621 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006622 // If heap poisoning is enabled, unpoisoning will be taken care of
6623 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006624 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006625 } else if (kPoisonHeapReferences) {
6626 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6627 }
6628}
6629
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006630void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6631 Location out,
6632 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006633 DCHECK(kEmitCompilerReadBarrier);
6634
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006635 // Insert a slow path based read barrier *after* the GC root load.
6636 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006637 // Note that GC roots are not affected by heap poisoning, so we do
6638 // not need to do anything special for this here.
6639 SlowPathCode* slow_path =
6640 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6641 AddSlowPath(slow_path);
6642
Roland Levillain0d5a2812015-11-13 10:07:31 +00006643 __ jmp(slow_path->GetEntryLabel());
6644 __ Bind(slow_path->GetExitLabel());
6645}
6646
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006647void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006648 // Nothing to do; this should be removed during the PrepareForRegisterAllocation pass.
Calin Juravleb1498f62015-02-16 13:13:29 +00006649 LOG(FATAL) << "Unreachable";
6650}
6651
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006652void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006653 // Nothing to do; this should be removed during the PrepareForRegisterAllocation pass.
Calin Juravleb1498f62015-02-16 13:13:29 +00006654 LOG(FATAL) << "Unreachable";
6655}
6656
Mark Mendellfe57faa2015-09-18 09:26:15 -04006657// Simple implementation of packed switch - generate cascaded compare/jumps.
6658void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6659 LocationSummary* locations =
6660 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6661 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006662 locations->AddTemp(Location::RequiresRegister());
6663 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006664}
6665
6666void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6667 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006668 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006669 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006670 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6671 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6672 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006673 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6674
6675 // Should we generate smaller inline compare/jumps?
6676 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6677 // Figure out the correct compare values and jump conditions.
6678 // Handle the first compare/branch as a special case because it might
6679 // jump to the default case.
6680 DCHECK_GT(num_entries, 2u);
6681 Condition first_condition;
6682 uint32_t index;
6683 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6684 if (lower_bound != 0) {
6685 first_condition = kLess;
6686 __ cmpl(value_reg_in, Immediate(lower_bound));
6687 __ j(first_condition, codegen_->GetLabelOf(default_block));
6688 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6689
6690 index = 1;
6691 } else {
6692 // Handle all the compare/jumps below.
6693 first_condition = kBelow;
6694 index = 0;
6695 }
6696
6697 // Handle the rest of the compare/jumps.
6698 for (; index + 1 < num_entries; index += 2) {
6699 int32_t compare_to_value = lower_bound + index + 1;
6700 __ cmpl(value_reg_in, Immediate(compare_to_value));
6701 // Jump to successors[index] if value < case_value[index + 1] (i.e. value == case_value[index]).
6702 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6703 // Jump to successors[index + 1] if value == case_value[index + 1].
6704 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6705 }
6706
6707 if (index != num_entries) {
6708 // There is an odd number of entries; handle the last one.
6709 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00006710 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006711 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6712 }
6713
6714 // And the default for any other value.
6715 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6716 __ jmp(codegen_->GetLabelOf(default_block));
6717 }
6718 return;
6719 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006720
6721 // Remove the bias, if needed.
6722 Register value_reg_out = value_reg_in.AsRegister();
6723 if (lower_bound != 0) {
6724 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6725 value_reg_out = temp_reg.AsRegister();
6726 }
6727 CpuRegister value_reg(value_reg_out);
6728
6729 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006730 __ cmpl(value_reg, Immediate(num_entries - 1));
6731 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006732
Mark Mendell9c86b482015-09-18 13:36:07 -04006733 // We are in the range of the table.
6734 // Load the address of the jump table in the constant area.
6735 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006736
Mark Mendell9c86b482015-09-18 13:36:07 -04006737 // Load the (signed) offset from the jump table.
6738 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6739
6740 // Add the offset to the address of the table base.
6741 __ addq(temp_reg, base_reg);
6742
6743 // And jump.
6744 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006745}
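
// Sketch of the jump-table path generated above for a packed switch with a zero
// lower bound (registers are illustrative; the temps come from the allocator):
//
//   cmpl   value, <num_entries - 1>
//   ja     <default block>
//   leaq   base, [rip + <jump table in constant area>]
//   movsxd temp, dword ptr [base + value*4]   // signed 32-bit offset from the table base
//   addq   temp, base
//   jmp    temp
//
// Each table entry holds the 32-bit distance from the start of the table to its
// target block, filled in later by JumpTableRIPFixup::CreateJumpTable.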
6746
Aart Bikc5d47542016-01-27 17:00:35 -08006747void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6748 if (value == 0) {
6749 __ xorl(dest, dest);
6750 } else {
6751 __ movl(dest, Immediate(value));
6752 }
6753}
6754
Mark Mendell92e83bf2015-05-07 11:25:03 -04006755void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6756 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006757 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006758 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006759 } else if (IsUint<32>(value)) {
6760 // We can use a 32-bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006761 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6762 } else {
6763 __ movq(dest, Immediate(value));
6764 }
6765}
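
// Encodings chosen above, as a quick reference (a size optimization only):
//   value == 0                       ->  xorl dest, dest   (writing the low 32 bits clears the high 32)
//   value fits in 32 unsigned bits   ->  movl dest, imm32  (zero-extended by the CPU)
//   anything else                    ->  movq dest, imm64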
6766
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006767void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6768 if (value == 0) {
6769 __ xorps(dest, dest);
6770 } else {
6771 __ movss(dest, LiteralInt32Address(value));
6772 }
6773}
6774
6775void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6776 if (value == 0) {
6777 __ xorpd(dest, dest);
6778 } else {
6779 __ movsd(dest, LiteralInt64Address(value));
6780 }
6781}
6782
6783void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6784 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6785}
6786
6787void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6788 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6789}
6790
Aart Bika19616e2016-02-01 18:57:58 -08006791void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6792 if (value == 0) {
6793 __ testl(dest, dest);
6794 } else {
6795 __ cmpl(dest, Immediate(value));
6796 }
6797}
6798
6799void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6800 if (IsInt<32>(value)) {
6801 if (value == 0) {
6802 __ testq(dest, dest);
6803 } else {
6804 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6805 }
6806 } else {
6807 // Value won't fit in a 32-bit immediate.
6808 __ cmpq(dest, LiteralInt64Address(value));
6809 }
6810}
6811
Mark Mendellcfa410b2015-05-25 16:02:44 -04006812void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6813 DCHECK(dest.IsDoubleStackSlot());
6814 if (IsInt<32>(value)) {
6815 // Can move directly as an int32 constant.
6816 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6817 Immediate(static_cast<int32_t>(value)));
6818 } else {
6819 Load64BitValue(CpuRegister(TMP), value);
6820 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6821 }
6822}
6823
Mark Mendell9c86b482015-09-18 13:36:07 -04006824/**
6825 * Class to handle late fixup of offsets into constant area.
6826 */
6827class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6828 public:
6829 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6830 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6831
6832 protected:
6833 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6834
6835 CodeGeneratorX86_64* codegen_;
6836
6837 private:
6838 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6839 // Patch the correct offset for the instruction. We use the address of the
6840 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6841 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6842 int32_t relative_position = constant_offset - pos;
6843
6844 // Patch in the right value.
6845 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6846 }
6847
6848 // Location in constant area that the fixup refers to.
6849 size_t offset_into_constant_area_;
6850};
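
// Minimal sketch of what RIPFixup::Process patches, assuming the fixup is
// attached to an instruction whose last four bytes are a RIP-relative
// displacement (e.g. `movsd xmm0, [rip + disp32]`):
//
//   disp32 = (ConstantAreaStart() + offset_into_constant_area_) - pos
//
// where `pos` is the offset of the next instruction, which is exactly the base
// that RIP-relative addressing uses on x86-64.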
6851
6852/**
6853 * Class to handle late fixup of offsets to a jump table that will be created in the
6854 * constant area.
6855 */
6856class JumpTableRIPFixup : public RIPFixup {
6857 public:
6858 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6859 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6860
6861 void CreateJumpTable() {
6862 X86_64Assembler* assembler = codegen_->GetAssembler();
6863
6864 // Ensure that the reference to the jump table has the correct offset.
6865 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6866 SetOffset(offset_in_constant_table);
6867
6868 // Compute the offset from the start of the function to this jump table.
6869 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6870
6871 // Populate the jump table with the correct values.
6872 int32_t num_entries = switch_instr_->GetNumEntries();
6873 HBasicBlock* block = switch_instr_->GetBlock();
6874 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6875 // The value that we want is the target offset - the position of the table.
6876 for (int32_t i = 0; i < num_entries; i++) {
6877 HBasicBlock* b = successors[i];
6878 Label* l = codegen_->GetLabelOf(b);
6879 DCHECK(l->IsBound());
6880 int32_t offset_to_block = l->Position() - current_table_offset;
6881 assembler->AppendInt32(offset_to_block);
6882 }
6883 }
6884
6885 private:
6886 const HPackedSwitch* switch_instr_;
6887};
6888
Mark Mendellf55c3e02015-03-26 21:07:46 -04006889void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6890 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006891 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006892 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6893 // Align to a 4-byte boundary to reduce cache misses, as the data consists of 4- and 8-byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006894 assembler->Align(4, 0);
6895 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006896
6897 // Populate any jump tables.
6898 for (auto jump_table : fixups_to_jump_tables_) {
6899 jump_table->CreateJumpTable();
6900 }
6901
6902 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006903 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006904 }
6905
6906 // And finish up.
6907 CodeGenerator::Finalize(allocator);
6908}
6909
Mark Mendellf55c3e02015-03-26 21:07:46 -04006910Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6911 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6912 return Address::RIP(fixup);
6913}
6914
6915Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6916 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6917 return Address::RIP(fixup);
6918}
6919
6920Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6921 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6922 return Address::RIP(fixup);
6923}
6924
6925Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6926 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6927 return Address::RIP(fixup);
6928}
6929
Andreas Gampe85b62f22015-09-09 13:15:38 -07006930// TODO: trg as memory.
6931void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6932 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006933 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006934 return;
6935 }
6936
6937 DCHECK_NE(type, Primitive::kPrimVoid);
6938
6939 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6940 if (trg.Equals(return_loc)) {
6941 return;
6942 }
6943
6944 // Let the parallel move resolver take care of all of this.
6945 HParallelMove parallel_move(GetGraph()->GetArena());
6946 parallel_move.AddMove(return_loc, trg, type, nullptr);
6947 GetMoveResolver()->EmitNativeCode(&parallel_move);
6948}
6949
Mark Mendell9c86b482015-09-18 13:36:07 -04006950Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6951 // Create a fixup to be used to create and address the jump table.
6952 JumpTableRIPFixup* table_fixup =
6953 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6954
6955 // We have to populate the jump tables.
6956 fixups_to_jump_tables_.push_back(table_fixup);
6957 return Address::RIP(table_fixup);
6958}
6959
Mark Mendellea5af682015-10-22 17:35:49 -04006960void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6961 const Address& addr_high,
6962 int64_t v,
6963 HInstruction* instruction) {
6964 if (IsInt<32>(v)) {
6965 int32_t v_32 = v;
6966 __ movq(addr_low, Immediate(v_32));
6967 MaybeRecordImplicitNullCheck(instruction);
6968 } else {
6969 // Didn't fit in a 32-bit immediate. Do it in pieces.
6970 int32_t low_v = Low32Bits(v);
6971 int32_t high_v = High32Bits(v);
6972 __ movl(addr_low, Immediate(low_v));
6973 MaybeRecordImplicitNullCheck(instruction);
6974 __ movl(addr_high, Immediate(high_v));
6975 }
6976}
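
// Illustration (an example, not from the original comments): storing the
// constant 0x112233445566 does not fit the sign-extended imm32 form of movq,
// so it is split into two 32-bit stores:
//
//   movl [addr_low],  0x33445566   // Low32Bits(v); may fault, hence the null check above
//   movl [addr_high], 0x00001122   // High32Bits(v)
//
// Note that the split store is not atomic.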
6977
Roland Levillain4d027112015-07-01 15:41:14 +01006978#undef __
6979
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006980} // namespace x86_64
6981} // namespace art