/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

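// The ArtMethod* of the current method is passed in RDI and spilled at the lowest
// address of the frame ([RSP + 0]); the two constants below encode that convention.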
static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

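// Callee-saved registers as seen by the register allocator. The core list matches the
// System V AMD64 ABI; the FP list is an ART-internal convention, since the native ABI
// treats all XMM registers as caller-saved.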
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

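// C2 condition flag (bit 10) of the x87 FPU status word, checked when looping on fprem
// to compute floating-point remainders.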
static constexpr int kC2ConditionMask = 0x400;

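// Shorthand used by the slow paths below: `__` expands to the x86-64 assembler of the
// enclosing code generator.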
// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

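// Slow path for integer division/remainder when the divisor is -1. On x86-64, idiv
// raises a #DE fault for kMinInt/kMinLong divided by -1, so that divisor is diverted
// here: the quotient is the negated dividend and the remainder is 0.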
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    x86_64_codegen->InvokeRuntime(entry_point_offset,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location obj)
      : SlowPathCode(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg = obj_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // Save live registers before the runtime call, and in particular
    // RDI and/or RAX (if they are live), as they are clobbered by
    // functions art_quick_read_barrier_mark_regX.
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(reg, RSP);
    DCHECK(0 <= reg && reg < kNumberOfCpuRegisters) << reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI and output in RAX):
    //
    //   RDI <- obj
    //   RAX <- ReadBarrierMark(RDI)
    //   obj <- RAX
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64WordSize>(reg);
    // TODO: Do not emit a stack map for this runtime call.
    x86_64_codegen->InvokeRuntime(entry_point_offset,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT

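// Maps integer condition to x86_64 name.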
inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default:      break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference, and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (though
  // future collectors may not).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
  type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
  __ Bind(&type_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
  for (const TypePatchInfo<Label>& info : type_patches_) {
    // These are always PC-relative, see GetSupportedLoadClassKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset,
                                                             &info.dex_file,
                                                             info.label.Position(),
                                                             info.type_index));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

1050void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
Alexandre Rames8158f282015-08-07 10:26:17 +01001051 HInstruction* instruction,
1052 uint32_t dex_pc,
1053 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001054 ValidateInvokeRuntime(instruction, slow_path);
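  // Quick entry points are reached through the Thread object, which x86-64 addresses
  // via the GS segment register, hence the gs-prefixed absolute call below.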
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001055 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
Alexandre Rames8158f282015-08-07 10:26:17 +01001056 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames8158f282015-08-07 10:26:17 +01001057}
1058
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001059static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001060// Use a fake return address register to mimic Quick.
1061static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
Mark Mendellfb8d2792015-03-31 22:16:59 -04001062CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001063 const X86_64InstructionSetFeatures& isa_features,
1064 const CompilerOptions& compiler_options,
1065 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +00001066 : CodeGenerator(graph,
1067 kNumberOfCpuRegisters,
1068 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001069 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001070 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1071 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001072 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001073 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1074 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001075 compiler_options,
1076 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001077 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001078 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001079 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -04001080 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001081 assembler_(graph->GetArena()),
Mark Mendellf55c3e02015-03-26 21:07:46 -04001082 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +00001083 constant_area_start_(0),
Vladimir Marko5233f932015-09-29 19:01:15 +01001084 method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1085 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0f7dca42015-11-02 14:36:43 +00001086 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001087 simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1088 string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001089 type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Mark Mendell9c86b482015-09-18 13:36:07 -04001090 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001091 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
1092}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001093
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001094InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1095 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001096 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001097 assembler_(codegen->GetAssembler()),
1098 codegen_(codegen) {}
1099
David Brazdil58282f42016-01-14 12:45:10 +00001100void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001101 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001102 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001103
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001104 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001105 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001106}
1107
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001108static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001109 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001110}
David Srbecky9d8606d2015-04-12 09:35:32 +01001111
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001112static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001113 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001114}
1115
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001116void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001117 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001118 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001119 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001120 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001121 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001122
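  // When the check is not skipped, probe an address below RSP; with implicit stack
  // overflow checks enabled (see the DCHECK above), a faulting probe is reported by
  // the runtime as a stack overflow.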
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001123 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001124 __ testq(CpuRegister(RAX), Address(
1125 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001126 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001127 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001128
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001129 if (HasEmptyFrame()) {
1130 return;
1131 }
1132
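  // Push the allocated callee-save core registers from highest to lowest; the popq
  // sequence in GenerateFrameExit restores them in the opposite order.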
Nicolas Geoffray98893962015-01-21 12:32:32 +00001133 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001134 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001135 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001136 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001137 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1138 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001139 }
1140 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001141
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001142 int adjust = GetFrameSize() - GetCoreSpillSize();
1143 __ subq(CpuRegister(RSP), Immediate(adjust));
1144 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001145 uint32_t xmm_spill_location = GetFpuSpillStart();
1146 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001147
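  // XMM callee-saves are not pushed; they are stored with movsd into frame slots
  // starting at GetFpuSpillStart().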
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001148 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1149 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001150 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1151 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1152 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001153 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001154 }
1155
Mathieu Chartiere401d142015-04-22 13:56:20 -07001156 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001157 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001158}
1159
1160void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001161 __ cfi().RememberState();
1162 if (!HasEmptyFrame()) {
1163 uint32_t xmm_spill_location = GetFpuSpillStart();
1164 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1165 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1166 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1167 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1168 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1169 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1170 }
1171 }
1172
1173 int adjust = GetFrameSize() - GetCoreSpillSize();
1174 __ addq(CpuRegister(RSP), Immediate(adjust));
1175 __ cfi().AdjustCFAOffset(-adjust);
1176
1177 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1178 Register reg = kCoreCalleeSaves[i];
1179 if (allocated_registers_.ContainsCoreRegister(reg)) {
1180 __ popq(CpuRegister(reg));
1181 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1182 __ cfi().Restore(DWARFReg(reg));
1183 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001184 }
1185 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001186 __ ret();
1187 __ cfi().RestoreState();
1188 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001189}
1190
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001191void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1192 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001193}
1194
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001195void CodeGeneratorX86_64::Move(Location destination, Location source) {
1196 if (source.Equals(destination)) {
1197 return;
1198 }
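  // Stack-to-stack moves below go through the reserved TMP register, since moves on
  // x86-64 cannot have two memory operands.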
1199 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001200 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001201 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001202 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001203 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001204 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001205 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001206 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1207 } else if (source.IsConstant()) {
1208 HConstant* constant = source.GetConstant();
1209 if (constant->IsLongConstant()) {
1210 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1211 } else {
1212 Load32BitValue(dest, GetInt32ValueOf(constant));
1213 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001214 } else {
1215 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001216 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001217 }
1218 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001219 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001220 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001221 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001222 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001223 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1224 } else if (source.IsConstant()) {
1225 HConstant* constant = source.GetConstant();
1226 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
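      // FP constants are materialized into the XMM register from their raw bit
      // pattern: 32 bits for float, 64 bits for double.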
1227 if (constant->IsFloatConstant()) {
1228 Load32BitValue(dest, static_cast<int32_t>(value));
1229 } else {
1230 Load64BitValue(dest, value);
1231 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001232 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001233 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001234 } else {
1235 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001236 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001237 }
1238 } else if (destination.IsStackSlot()) {
1239 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001240 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001241 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001242 } else if (source.IsFpuRegister()) {
1243 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001244 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001245 } else if (source.IsConstant()) {
1246 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001247 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001248 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001249 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001250 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001251 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1252 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001253 }
1254 } else {
1255 DCHECK(destination.IsDoubleStackSlot());
1256 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001257 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001258 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001259 } else if (source.IsFpuRegister()) {
1260 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001261 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001262 } else if (source.IsConstant()) {
1263 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001264 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001265 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001266 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001267 } else {
1268 DCHECK(constant->IsLongConstant());
1269 value = constant->AsLongConstant()->GetValue();
1270 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001271 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001272 } else {
1273 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001274 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1275 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001276 }
1277 }
1278}
1279
Calin Juravle175dc732015-08-25 15:42:32 +01001280void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1281 DCHECK(location.IsRegister());
1282 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1283}
1284
Calin Juravlee460d1d2015-09-29 04:52:17 +01001285void CodeGeneratorX86_64::MoveLocation(
1286 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1287 Move(dst, src);
1288}
1289
1290void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1291 if (location.IsRegister()) {
1292 locations->AddTemp(location);
1293 } else {
1294 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1295 }
1296}
1297
David Brazdilfc6a86a2015-06-26 10:33:45 +00001298void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001299 DCHECK(!successor->IsExitBlock());
1300
1301 HBasicBlock* block = got->GetBlock();
1302 HInstruction* previous = got->GetPrevious();
1303
1304 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001305 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001306 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1307 return;
1308 }
1309
1310 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1311 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1312 }
1313 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001314 __ jmp(codegen_->GetLabelOf(successor));
1315 }
1316}
1317
David Brazdilfc6a86a2015-06-26 10:33:45 +00001318void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1319 got->SetLocations(nullptr);
1320}
1321
1322void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1323 HandleGoto(got, got->GetSuccessor());
1324}
1325
1326void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1327 try_boundary->SetLocations(nullptr);
1328}
1329
1330void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1331 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1332 if (!successor->IsExitBlock()) {
1333 HandleGoto(try_boundary, successor);
1334 }
1335}
1336
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001337void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1338 exit->SetLocations(nullptr);
1339}
1340
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001341void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001342}
1343
Mark Mendell152408f2015-12-31 12:28:50 -05001344template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001345void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001346 LabelType* true_label,
1347 LabelType* false_label) {
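  // ucomiss/ucomisd report a NaN operand as "unordered", so that case is routed first
  // to whichever target the condition's bias requires, before testing the ordered
  // outcome below.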
Roland Levillain4fa13f62015-07-06 18:11:54 +01001348 if (cond->IsFPConditionTrueIfNaN()) {
1349 __ j(kUnordered, true_label);
1350 } else if (cond->IsFPConditionFalseIfNaN()) {
1351 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001352 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001353 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001354}
1355
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001356void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001357 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001358
Mark Mendellc4701932015-04-10 13:18:51 -04001359 Location left = locations->InAt(0);
1360 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001361 Primitive::Type type = condition->InputAt(0)->GetType();
1362 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001363 case Primitive::kPrimBoolean:
1364 case Primitive::kPrimByte:
1365 case Primitive::kPrimChar:
1366 case Primitive::kPrimShort:
1367 case Primitive::kPrimInt:
1368 case Primitive::kPrimNot: {
1369 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1370 if (right.IsConstant()) {
1371 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1372 if (value == 0) {
1373 __ testl(left_reg, left_reg);
1374 } else {
1375 __ cmpl(left_reg, Immediate(value));
1376 }
1377 } else if (right.IsStackSlot()) {
1378 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1379 } else {
1380 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1381 }
1382 break;
1383 }
Mark Mendellc4701932015-04-10 13:18:51 -04001384 case Primitive::kPrimLong: {
1385 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1386 if (right.IsConstant()) {
1387 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001388 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001389 } else if (right.IsDoubleStackSlot()) {
1390 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1391 } else {
1392 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1393 }
Mark Mendellc4701932015-04-10 13:18:51 -04001394 break;
1395 }
1396 case Primitive::kPrimFloat: {
1397 if (right.IsFpuRegister()) {
1398 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1399 } else if (right.IsConstant()) {
1400 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1401 codegen_->LiteralFloatAddress(
1402 right.GetConstant()->AsFloatConstant()->GetValue()));
1403 } else {
1404 DCHECK(right.IsStackSlot());
1405 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1406 Address(CpuRegister(RSP), right.GetStackIndex()));
1407 }
Mark Mendellc4701932015-04-10 13:18:51 -04001408 break;
1409 }
1410 case Primitive::kPrimDouble: {
1411 if (right.IsFpuRegister()) {
1412 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1413 } else if (right.IsConstant()) {
1414 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1415 codegen_->LiteralDoubleAddress(
1416 right.GetConstant()->AsDoubleConstant()->GetValue()));
1417 } else {
1418 DCHECK(right.IsDoubleStackSlot());
1419 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1420 Address(CpuRegister(RSP), right.GetStackIndex()));
1421 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001422 break;
1423 }
1424 default:
1425 LOG(FATAL) << "Unexpected condition type " << type;
1426 }
1427}
1428
1429template<class LabelType>
1430void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1431 LabelType* true_target_in,
1432 LabelType* false_target_in) {
1433 // Generated branching requires both targets to be explicit. If either of the
 1434  // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1435 LabelType fallthrough_target;
1436 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1437 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1438
1439 // Generate the comparison to set the CC.
1440 GenerateCompareTest(condition);
1441
1442 // Now generate the correct jump(s).
1443 Primitive::Type type = condition->InputAt(0)->GetType();
1444 switch (type) {
1445 case Primitive::kPrimLong: {
1446 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1447 break;
1448 }
1449 case Primitive::kPrimFloat: {
1450 GenerateFPJumps(condition, true_target, false_target);
1451 break;
1452 }
1453 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001454 GenerateFPJumps(condition, true_target, false_target);
1455 break;
1456 }
1457 default:
1458 LOG(FATAL) << "Unexpected condition type " << type;
1459 }
1460
David Brazdil0debae72015-11-12 18:37:00 +00001461 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001462 __ jmp(false_target);
1463 }
David Brazdil0debae72015-11-12 18:37:00 +00001464
1465 if (fallthrough_target.IsLinked()) {
1466 __ Bind(&fallthrough_target);
1467 }
Mark Mendellc4701932015-04-10 13:18:51 -04001468}
1469
David Brazdil0debae72015-11-12 18:37:00 +00001470static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
 1471  // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS can
 1472  // be relied on only if `cond` is emitted immediately before `branch`. We also can't
 1473  // reuse the EFLAGS for materialized FP conditions due to their complex branching.
1474 return cond->IsCondition() &&
1475 cond->GetNext() == branch &&
1476 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1477}
1478
Mark Mendell152408f2015-12-31 12:28:50 -05001479template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001480void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001481 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001482 LabelType* true_target,
1483 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001484 HInstruction* cond = instruction->InputAt(condition_input_index);
1485
1486 if (true_target == nullptr && false_target == nullptr) {
1487 // Nothing to do. The code always falls through.
1488 return;
1489 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001490 // Constant condition, statically compared against "true" (integer value 1).
1491 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001492 if (true_target != nullptr) {
1493 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001494 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001495 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001496 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001497 if (false_target != nullptr) {
1498 __ jmp(false_target);
1499 }
1500 }
1501 return;
1502 }
1503
1504 // The following code generates these patterns:
1505 // (1) true_target == nullptr && false_target != nullptr
1506 // - opposite condition true => branch to false_target
1507 // (2) true_target != nullptr && false_target == nullptr
1508 // - condition true => branch to true_target
1509 // (3) true_target != nullptr && false_target != nullptr
1510 // - condition true => branch to true_target
1511 // - branch to false_target
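  // In case (1) the opposite condition is used so that the "true" path simply falls
  // through to the code that follows.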
1512 if (IsBooleanValueOrMaterializedCondition(cond)) {
1513 if (AreEflagsSetFrom(cond, instruction)) {
1514 if (true_target == nullptr) {
1515 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1516 } else {
1517 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1518 }
1519 } else {
1520 // Materialized condition, compare against 0.
1521 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1522 if (lhs.IsRegister()) {
1523 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1524 } else {
1525 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1526 }
1527 if (true_target == nullptr) {
1528 __ j(kEqual, false_target);
1529 } else {
1530 __ j(kNotEqual, true_target);
1531 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001532 }
1533 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001534 // Condition has not been materialized, use its inputs as the
1535 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001536 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001537
David Brazdil0debae72015-11-12 18:37:00 +00001538 // If this is a long or FP comparison that has been folded into
1539 // the HCondition, generate the comparison directly.
1540 Primitive::Type type = condition->InputAt(0)->GetType();
1541 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1542 GenerateCompareTestAndBranch(condition, true_target, false_target);
1543 return;
1544 }
1545
1546 Location lhs = condition->GetLocations()->InAt(0);
1547 Location rhs = condition->GetLocations()->InAt(1);
1548 if (rhs.IsRegister()) {
1549 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1550 } else if (rhs.IsConstant()) {
1551 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001552 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001553 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001554 __ cmpl(lhs.AsRegister<CpuRegister>(),
1555 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1556 }
1557 if (true_target == nullptr) {
1558 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1559 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001560 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001561 }
Dave Allison20dfc792014-06-16 20:44:29 -07001562 }
David Brazdil0debae72015-11-12 18:37:00 +00001563
1564 // If neither branch falls through (case 3), the conditional branch to `true_target`
1565 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1566 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001567 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001568 }
1569}
1570
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001571void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001572 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1573 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001574 locations->SetInAt(0, Location::Any());
1575 }
1576}
1577
1578void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001579 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1580 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1581 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1582 nullptr : codegen_->GetLabelOf(true_successor);
1583 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1584 nullptr : codegen_->GetLabelOf(false_successor);
1585 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001586}
1587
1588void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1589 LocationSummary* locations = new (GetGraph()->GetArena())
1590 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001591 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001592 locations->SetInAt(0, Location::Any());
1593 }
1594}
1595
1596void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001597 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001598 GenerateTestAndBranch<Label>(deoptimize,
1599 /* condition_input_index */ 0,
1600 slow_path->GetEntryLabel(),
1601 /* false_target */ nullptr);
1602}
1603
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001604static bool SelectCanUseCMOV(HSelect* select) {
1605 // There are no conditional move instructions for XMMs.
1606 if (Primitive::IsFloatingPointType(select->GetType())) {
1607 return false;
1608 }
1609
1610 // A FP condition doesn't generate the single CC that we need.
1611 HInstruction* condition = select->GetCondition();
1612 if (condition->IsCondition() &&
1613 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1614 return false;
1615 }
1616
1617 // We can generate a CMOV for this Select.
1618 return true;
1619}
1620
David Brazdil74eb1b22015-12-14 11:44:01 +00001621void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1622 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1623 if (Primitive::IsFloatingPointType(select->GetType())) {
1624 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001625 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001626 } else {
1627 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001628 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001629 if (select->InputAt(1)->IsConstant()) {
1630 locations->SetInAt(1, Location::RequiresRegister());
1631 } else {
1632 locations->SetInAt(1, Location::Any());
1633 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001634 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001635 locations->SetInAt(1, Location::Any());
1636 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001637 }
1638 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1639 locations->SetInAt(2, Location::RequiresRegister());
1640 }
1641 locations->SetOut(Location::SameAsFirstInput());
1642}
1643
1644void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1645 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001646 if (SelectCanUseCMOV(select)) {
1647 // If both the condition and the source types are integer, we can generate
1648 // a CMOV to implement Select.
1649 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001650 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001651 DCHECK(locations->InAt(0).Equals(locations->Out()));
1652
1653 HInstruction* select_condition = select->GetCondition();
1654 Condition cond = kNotEqual;
1655
1656 // Figure out how to test the 'condition'.
1657 if (select_condition->IsCondition()) {
1658 HCondition* condition = select_condition->AsCondition();
1659 if (!condition->IsEmittedAtUseSite()) {
1660 // This was a previously materialized condition.
1661 // Can we use the existing condition code?
1662 if (AreEflagsSetFrom(condition, select)) {
1663 // Materialization was the previous instruction. Condition codes are right.
1664 cond = X86_64IntegerCondition(condition->GetCondition());
1665 } else {
1666 // No, we have to recreate the condition code.
1667 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1668 __ testl(cond_reg, cond_reg);
1669 }
1670 } else {
1671 GenerateCompareTest(condition);
1672 cond = X86_64IntegerCondition(condition->GetCondition());
1673 }
1674 } else {
1675 // Must be a boolean condition, which needs to be compared to 0.
1676 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1677 __ testl(cond_reg, cond_reg);
1678 }
1679
1680 // If the condition is true, overwrite the output, which already contains false.
1681 // Generate the correct sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001682 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1683 if (value_true_loc.IsRegister()) {
1684 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1685 } else {
1686 __ cmov(cond,
1687 value_false,
1688 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1689 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001690 } else {
1691 NearLabel false_target;
1692 GenerateTestAndBranch<NearLabel>(select,
1693 /* condition_input_index */ 2,
1694 /* true_target */ nullptr,
1695 &false_target);
1696 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1697 __ Bind(&false_target);
1698 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001699}
1700
David Srbecky0cf44932015-12-09 14:09:59 +00001701void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1702 new (GetGraph()->GetArena()) LocationSummary(info);
1703}
1704
David Srbeckyd28f4a02016-03-14 17:14:24 +00001705void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1706 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001707}
1708
1709void CodeGeneratorX86_64::GenerateNop() {
1710 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001711}
1712
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001713void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001714 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001715 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001716 // Handle the long/FP comparisons made in instruction simplification.
1717 switch (cond->InputAt(0)->GetType()) {
1718 case Primitive::kPrimLong:
1719 locations->SetInAt(0, Location::RequiresRegister());
1720 locations->SetInAt(1, Location::Any());
1721 break;
1722 case Primitive::kPrimFloat:
1723 case Primitive::kPrimDouble:
1724 locations->SetInAt(0, Location::RequiresFpuRegister());
1725 locations->SetInAt(1, Location::Any());
1726 break;
1727 default:
1728 locations->SetInAt(0, Location::RequiresRegister());
1729 locations->SetInAt(1, Location::Any());
1730 break;
1731 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001732 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001733 locations->SetOut(Location::RequiresRegister());
1734 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001735}
1736
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001737void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001738 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001739 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001740 }
Mark Mendellc4701932015-04-10 13:18:51 -04001741
1742 LocationSummary* locations = cond->GetLocations();
1743 Location lhs = locations->InAt(0);
1744 Location rhs = locations->InAt(1);
1745 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001746 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001747
1748 switch (cond->InputAt(0)->GetType()) {
1749 default:
1750 // Integer case.
1751
1752 // Clear output register: setcc only sets the low byte.
1753 __ xorl(reg, reg);
1754
1755 if (rhs.IsRegister()) {
1756 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1757 } else if (rhs.IsConstant()) {
1758 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001759 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001760 } else {
1761 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1762 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001763 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001764 return;
1765 case Primitive::kPrimLong:
1766 // Clear output register: setcc only sets the low byte.
1767 __ xorl(reg, reg);
1768
1769 if (rhs.IsRegister()) {
1770 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1771 } else if (rhs.IsConstant()) {
1772 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001773 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001774 } else {
1775 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1776 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001777 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001778 return;
1779 case Primitive::kPrimFloat: {
1780 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1781 if (rhs.IsConstant()) {
1782 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1783 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1784 } else if (rhs.IsStackSlot()) {
1785 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1786 } else {
1787 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1788 }
1789 GenerateFPJumps(cond, &true_label, &false_label);
1790 break;
1791 }
1792 case Primitive::kPrimDouble: {
1793 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1794 if (rhs.IsConstant()) {
1795 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1796 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1797 } else if (rhs.IsDoubleStackSlot()) {
1798 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1799 } else {
1800 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1801 }
1802 GenerateFPJumps(cond, &true_label, &false_label);
1803 break;
1804 }
1805 }
1806
1807 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001808 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001809
Roland Levillain4fa13f62015-07-06 18:11:54 +01001810 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001811 __ Bind(&false_label);
1812 __ xorl(reg, reg);
1813 __ jmp(&done_label);
1814
Roland Levillain4fa13f62015-07-06 18:11:54 +01001815 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001816 __ Bind(&true_label);
1817 __ movl(reg, Immediate(1));
1818 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001819}
1820
1821void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001822 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001823}
1824
1825void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001826 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001827}
1828
1829void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001830 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001831}
1832
1833void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001834 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001835}
1836
1837void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001838 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001839}
1840
1841void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001842 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001843}
1844
1845void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001846 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001847}
1848
1849void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001850 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001851}
1852
1853void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001854 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001855}
1856
1857void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001858 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001859}
1860
1861void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001862 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001863}
1864
1865void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001866 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001867}
1868
Aart Bike9f37602015-10-09 11:15:55 -07001869void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001870 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001871}
1872
1873void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001874 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001875}
1876
1877void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001878 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001879}
1880
1881void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001882 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001883}
1884
1885void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001886 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001887}
1888
1889void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001890 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001891}
1892
1893void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001894 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001895}
1896
1897void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001898 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001899}
1900
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001901void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001902 LocationSummary* locations =
1903 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001904 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001905 case Primitive::kPrimBoolean:
1906 case Primitive::kPrimByte:
1907 case Primitive::kPrimShort:
1908 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001909 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001910 case Primitive::kPrimLong: {
1911 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001912 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001913 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1914 break;
1915 }
1916 case Primitive::kPrimFloat:
1917 case Primitive::kPrimDouble: {
1918 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001919 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001920 locations->SetOut(Location::RequiresRegister());
1921 break;
1922 }
1923 default:
1924 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1925 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001926}
1927
1928void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001929 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001930 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001931 Location left = locations->InAt(0);
1932 Location right = locations->InAt(1);
1933
Mark Mendell0c9497d2015-08-21 09:30:05 -04001934 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001935 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001936 Condition less_cond = kLess;
1937
Calin Juravleddb7df22014-11-25 20:56:51 +00001938 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001939 case Primitive::kPrimBoolean:
1940 case Primitive::kPrimByte:
1941 case Primitive::kPrimShort:
1942 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001943 case Primitive::kPrimInt: {
1944 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1945 if (right.IsConstant()) {
1946 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1947 codegen_->Compare32BitValue(left_reg, value);
1948 } else if (right.IsStackSlot()) {
1949 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1950 } else {
1951 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1952 }
1953 break;
1954 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001955 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001956 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1957 if (right.IsConstant()) {
1958 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001959 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001960 } else if (right.IsDoubleStackSlot()) {
1961 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001962 } else {
1963 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1964 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001965 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001966 }
1967 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001968 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1969 if (right.IsConstant()) {
1970 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1971 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1972 } else if (right.IsStackSlot()) {
1973 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1974 } else {
1975 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1976 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001977 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001978 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001979 break;
1980 }
1981 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001982 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1983 if (right.IsConstant()) {
1984 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1985 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1986 } else if (right.IsDoubleStackSlot()) {
1987 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1988 } else {
1989 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1990 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001991 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001992 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001993 break;
1994 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001995 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001996 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001997 }
Aart Bika19616e2016-02-01 18:57:58 -08001998
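  // Materialize the result: 0 when equal, 1 when greater, -1 when less. For FP inputs
  // the unordered case was already routed to &greater or &less above, as dictated by
  // the compare's bias.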
Calin Juravleddb7df22014-11-25 20:56:51 +00001999 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00002000 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08002001 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00002002
Calin Juravle91debbc2014-11-26 19:01:09 +00002003 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00002004 __ movl(out, Immediate(1));
2005 __ jmp(&done);
2006
2007 __ Bind(&less);
2008 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002009
2010 __ Bind(&done);
2011}
2012
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002013void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002014 LocationSummary* locations =
2015 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002016 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002017}
2018
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002019void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002020 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002021}
2022
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002023void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
2024 LocationSummary* locations =
2025 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2026 locations->SetOut(Location::ConstantLocation(constant));
2027}
2028
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002029void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002030 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002031}
2032
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002033void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002034 LocationSummary* locations =
2035 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002036 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002037}
2038
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002039void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002040 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002041}
2042
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002043void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2044 LocationSummary* locations =
2045 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2046 locations->SetOut(Location::ConstantLocation(constant));
2047}
2048
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002049void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002050 // Will be generated at use site.
2051}
2052
2053void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2054 LocationSummary* locations =
2055 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2056 locations->SetOut(Location::ConstantLocation(constant));
2057}
2058
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002059void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
2060 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002061 // Will be generated at use site.
2062}
2063
Calin Juravle27df7582015-04-17 19:12:31 +01002064void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2065 memory_barrier->SetLocations(nullptr);
2066}
2067
2068void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002069 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002070}
2071
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002072void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2073 ret->SetLocations(nullptr);
2074}
2075
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002076void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002077 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002078}
2079
2080void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002081 LocationSummary* locations =
2082 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002083 switch (ret->InputAt(0)->GetType()) {
2084 case Primitive::kPrimBoolean:
2085 case Primitive::kPrimByte:
2086 case Primitive::kPrimChar:
2087 case Primitive::kPrimShort:
2088 case Primitive::kPrimInt:
2089 case Primitive::kPrimNot:
2090 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002091 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002092 break;
2093
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002094 case Primitive::kPrimFloat:
2095 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002096 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002097 break;
2098
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002099 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002100 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002101 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002102}
2103
2104void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2105 if (kIsDebugBuild) {
2106 switch (ret->InputAt(0)->GetType()) {
2107 case Primitive::kPrimBoolean:
2108 case Primitive::kPrimByte:
2109 case Primitive::kPrimChar:
2110 case Primitive::kPrimShort:
2111 case Primitive::kPrimInt:
2112 case Primitive::kPrimNot:
2113 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002114 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002115 break;
2116
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002117 case Primitive::kPrimFloat:
2118 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002119 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002120 XMM0);
2121 break;
2122
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002123 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002124 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002125 }
2126 }
2127 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002128}
2129
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002130Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2131 switch (type) {
2132 case Primitive::kPrimBoolean:
2133 case Primitive::kPrimByte:
2134 case Primitive::kPrimChar:
2135 case Primitive::kPrimShort:
2136 case Primitive::kPrimInt:
2137 case Primitive::kPrimNot:
2138 case Primitive::kPrimLong:
2139 return Location::RegisterLocation(RAX);
2140
2141 case Primitive::kPrimVoid:
2142 return Location::NoLocation();
2143
2144 case Primitive::kPrimDouble:
2145 case Primitive::kPrimFloat:
2146 return Location::FpuRegisterLocation(XMM0);
2147 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002148
2149 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002150}
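// Unlike on a 32-bit target, where a Java long needs a register pair, a long
// fits in a single 64-bit register here, so kPrimLong shares RAX with the
// narrower integral and reference types above.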
2151
2152Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2153 return Location::RegisterLocation(kMethodRegisterArgument);
2154}
2155
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002156Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002157 switch (type) {
2158 case Primitive::kPrimBoolean:
2159 case Primitive::kPrimByte:
2160 case Primitive::kPrimChar:
2161 case Primitive::kPrimShort:
2162 case Primitive::kPrimInt:
2163 case Primitive::kPrimNot: {
2164 uint32_t index = gp_index_++;
2165 stack_index_++;
2166 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002167 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002168 } else {
2169 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2170 }
2171 }
2172
2173 case Primitive::kPrimLong: {
2174 uint32_t index = gp_index_;
2175 stack_index_ += 2;
2176 if (index < calling_convention.GetNumberOfRegisters()) {
2177 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002178 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002179 } else {
2180 gp_index_ += 2;
2181 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2182 }
2183 }
2184
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002185 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002186 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002187 stack_index_++;
2188 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002189 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002190 } else {
2191 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2192 }
2193 }
2194
2195 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002196 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002197 stack_index_ += 2;
2198 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002199 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002200 } else {
2201 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2202 }
2203 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002204
2205 case Primitive::kPrimVoid:
2206 LOG(FATAL) << "Unexpected parameter type " << type;
2207 break;
2208 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002209 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002210}
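// A sketch of how the visitor hands out argument locations, assuming the
// usual ART x86-64 managed calling convention (RDI carries the ArtMethod*,
// integer/reference arguments then use RSI, RDX, RCX, R8, R9, and
// floating-point arguments use XMM0-XMM7; the authoritative register lists
// live in the InvokeDexCallingConvention declaration):
//
//   void foo(int a, long b, float c, double d)
//     a -> RSI, b -> RDX, c -> XMM0, d -> XMM1
//
// Integer and floating-point arguments are counted separately (gp_index_ vs
// float_index_), while stack_index_ advances for every argument so that any
// overflow argument lands in the correct caller-allocated stack slot.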
2211
Calin Juravle175dc732015-08-25 15:42:32 +01002212void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2213 // The trampoline uses the same calling convention as dex calling conventions,
2214 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
2215 // the method_idx.
2216 HandleInvoke(invoke);
2217}
2218
2219void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2220 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2221}
2222
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002223void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002224 // Explicit clinit checks triggered by static invokes must have been pruned by
2225 // art::PrepareForRegisterAllocation.
2226 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002227
Mark Mendellfb8d2792015-03-31 22:16:59 -04002228 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002229 if (intrinsic.TryDispatch(invoke)) {
2230 return;
2231 }
2232
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002233 HandleInvoke(invoke);
2234}
2235
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002236static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2237 if (invoke->GetLocations()->Intrinsified()) {
2238 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2239 intrinsic.Dispatch(invoke);
2240 return true;
2241 }
2242 return false;
2243}
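// Intrinsics are handled in two steps: IntrinsicLocationsBuilderX86_64::TryDispatch
// (called from the LocationsBuilder visitors) marks the invoke's LocationSummary
// as Intrinsified() and installs intrinsic-specific locations, and the helper
// above then routes code generation to IntrinsicCodeGeneratorX86_64 instead of
// emitting a regular call.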
2244
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002245void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002246 // Explicit clinit checks triggered by static invokes must have been pruned by
2247 // art::PrepareForRegisterAllocation.
2248 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002249
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002250 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2251 return;
2252 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002253
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002254 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002255 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002256 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002257 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002258}
2259
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002260void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002261 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002262 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002263}
2264
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002265void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002266 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002267 if (intrinsic.TryDispatch(invoke)) {
2268 return;
2269 }
2270
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002271 HandleInvoke(invoke);
2272}
2273
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002274void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002275 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2276 return;
2277 }
2278
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002279 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002280 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002281 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002282}
2283
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002284void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2285 HandleInvoke(invoke);
2286 // Add the hidden argument.
2287 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2288}
2289
2290void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2291 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002292 LocationSummary* locations = invoke->GetLocations();
2293 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2294 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002295 Location receiver = locations->InAt(0);
2296 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2297
Roland Levillain0d5a2812015-11-13 10:07:31 +00002298  // Set the hidden argument. It is safe to do this here, as RAX
2299  // won't be modified between this point and the `call` instruction.
2300 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002301 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002302
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002303 if (receiver.IsStackSlot()) {
2304 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002305 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002306 __ movl(temp, Address(temp, class_offset));
2307 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002308 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002309 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002310 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002311 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002312 // Instead of simply (possibly) unpoisoning `temp` here, we should
2313 // emit a read barrier for the previous class reference load.
2314  // However, this is not required in practice, as this is an
2315  // intermediate/temporary reference and because the current
2316  // concurrent copying collector keeps the from-space memory
2317  // intact/accessible until the end of the marking phase (the
2318  // concurrent copying collector may not keep it so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002319 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002320 // temp = temp->GetAddressOfIMT()
2321 __ movq(temp,
2322 Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
2323  // Compute the byte offset of this interface method's IMT entry.
2324 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00002325 invoke->GetImtIndex(), kX86_64PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002326 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002327 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002328 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002329 __ call(Address(temp,
2330 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002331
2332 DCHECK(!codegen_->IsLeafMethod());
2333 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2334}
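// To summarize, the interface dispatch above boils down to roughly the
// following sequence (a pseudo-assembly sketch; the real displacements are the
// mirror::Object, mirror::Class, ImTable and ArtMethod offsets computed above):
//
//   movl temp, [receiver + class_offset]   // load the receiver's class
//   movq temp, [temp + imt_ptr_offset]     // load the class's ImTable
//   movq temp, [temp + method_offset]      // load the ArtMethod* entry
//   call [temp + entry_point_offset]       // enter its quick code
//
// with the interface method index parked in RAX as the hidden argument for the
// whole sequence.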
2335
Roland Levillain88cb1752014-10-20 16:36:47 +01002336void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2337 LocationSummary* locations =
2338 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2339 switch (neg->GetResultType()) {
2340 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002341 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002342 locations->SetInAt(0, Location::RequiresRegister());
2343 locations->SetOut(Location::SameAsFirstInput());
2344 break;
2345
Roland Levillain88cb1752014-10-20 16:36:47 +01002346 case Primitive::kPrimFloat:
2347 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002348 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002349 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002350 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002351 break;
2352
2353 default:
2354 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2355 }
2356}
2357
2358void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2359 LocationSummary* locations = neg->GetLocations();
2360 Location out = locations->Out();
2361 Location in = locations->InAt(0);
2362 switch (neg->GetResultType()) {
2363 case Primitive::kPrimInt:
2364 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002365 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002366 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002367 break;
2368
2369 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002370 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002371 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002372 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002373 break;
2374
Roland Levillain5368c212014-11-27 15:03:41 +00002375 case Primitive::kPrimFloat: {
2376 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002377 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002378 // Implement float negation with an exclusive or with value
2379 // 0x80000000 (mask for bit 31, representing the sign of a
2380 // single-precision floating-point number).
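      // For example, 1.5f is 0x3FC00000 and XOR-ing with the mask yields
      // 0xBFC00000, i.e. -1.5f. Using XOR rather than a subtraction from zero
      // also turns +0.0f into -0.0f, as required by Java's unary minus.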
Mark Mendell40741f32015-04-20 22:10:34 -04002381 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002382 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002383 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002384 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002385
Roland Levillain5368c212014-11-27 15:03:41 +00002386 case Primitive::kPrimDouble: {
2387 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002388 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002389 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002390 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002391 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002392 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002393 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002394 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002395 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002396
2397 default:
2398 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2399 }
2400}
2401
Roland Levillaindff1f282014-11-05 14:15:05 +00002402void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2403 LocationSummary* locations =
2404 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2405 Primitive::Type result_type = conversion->GetResultType();
2406 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002407 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002408
David Brazdilb2bd1c52015-03-25 11:17:37 +00002409 // The Java language does not allow treating boolean as an integral type but
2410 // our bit representation makes it safe.
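  // For instance, a boolean input reaching the `int-to-byte' path below is
  // already materialized as 0 or 1 in a 32-bit register (an assumption about
  // how booleans are represented in this backend), so the movsxb used for the
  // int case produces the correct 0/1 byte for it as well.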
David Brazdil46e2a392015-03-16 17:31:52 +00002411
Roland Levillaindff1f282014-11-05 14:15:05 +00002412 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002413 case Primitive::kPrimByte:
2414 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002415 case Primitive::kPrimLong:
2416 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002417 case Primitive::kPrimBoolean:
2418 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002419 case Primitive::kPrimShort:
2420 case Primitive::kPrimInt:
2421 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002422 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002423 locations->SetInAt(0, Location::Any());
2424 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2425 break;
2426
2427 default:
2428 LOG(FATAL) << "Unexpected type conversion from " << input_type
2429 << " to " << result_type;
2430 }
2431 break;
2432
Roland Levillain01a8d712014-11-14 16:27:39 +00002433 case Primitive::kPrimShort:
2434 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002435 case Primitive::kPrimLong:
2436 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002437 case Primitive::kPrimBoolean:
2438 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002439 case Primitive::kPrimByte:
2440 case Primitive::kPrimInt:
2441 case Primitive::kPrimChar:
2442 // Processing a Dex `int-to-short' instruction.
2443 locations->SetInAt(0, Location::Any());
2444 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2445 break;
2446
2447 default:
2448 LOG(FATAL) << "Unexpected type conversion from " << input_type
2449 << " to " << result_type;
2450 }
2451 break;
2452
Roland Levillain946e1432014-11-11 17:35:19 +00002453 case Primitive::kPrimInt:
2454 switch (input_type) {
2455 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002456 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002457 locations->SetInAt(0, Location::Any());
2458 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2459 break;
2460
2461 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002462 // Processing a Dex `float-to-int' instruction.
2463 locations->SetInAt(0, Location::RequiresFpuRegister());
2464 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002465 break;
2466
Roland Levillain946e1432014-11-11 17:35:19 +00002467 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002468 // Processing a Dex `double-to-int' instruction.
2469 locations->SetInAt(0, Location::RequiresFpuRegister());
2470 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002471 break;
2472
2473 default:
2474 LOG(FATAL) << "Unexpected type conversion from " << input_type
2475 << " to " << result_type;
2476 }
2477 break;
2478
Roland Levillaindff1f282014-11-05 14:15:05 +00002479 case Primitive::kPrimLong:
2480 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002481 case Primitive::kPrimBoolean:
2482 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002483 case Primitive::kPrimByte:
2484 case Primitive::kPrimShort:
2485 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002486 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002487 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002488 // TODO: We would benefit from a (to-be-implemented)
2489 // Location::RegisterOrStackSlot requirement for this input.
2490 locations->SetInAt(0, Location::RequiresRegister());
2491 locations->SetOut(Location::RequiresRegister());
2492 break;
2493
2494 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002495 // Processing a Dex `float-to-long' instruction.
2496 locations->SetInAt(0, Location::RequiresFpuRegister());
2497 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002498 break;
2499
Roland Levillaindff1f282014-11-05 14:15:05 +00002500 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002501 // Processing a Dex `double-to-long' instruction.
2502 locations->SetInAt(0, Location::RequiresFpuRegister());
2503 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002504 break;
2505
2506 default:
2507 LOG(FATAL) << "Unexpected type conversion from " << input_type
2508 << " to " << result_type;
2509 }
2510 break;
2511
Roland Levillain981e4542014-11-14 11:47:14 +00002512 case Primitive::kPrimChar:
2513 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002514 case Primitive::kPrimLong:
2515 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002516 case Primitive::kPrimBoolean:
2517 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002518 case Primitive::kPrimByte:
2519 case Primitive::kPrimShort:
2520 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002521 // Processing a Dex `int-to-char' instruction.
2522 locations->SetInAt(0, Location::Any());
2523 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2524 break;
2525
2526 default:
2527 LOG(FATAL) << "Unexpected type conversion from " << input_type
2528 << " to " << result_type;
2529 }
2530 break;
2531
Roland Levillaindff1f282014-11-05 14:15:05 +00002532 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002533 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002534 case Primitive::kPrimBoolean:
2535 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002536 case Primitive::kPrimByte:
2537 case Primitive::kPrimShort:
2538 case Primitive::kPrimInt:
2539 case Primitive::kPrimChar:
2540 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002541 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002542 locations->SetOut(Location::RequiresFpuRegister());
2543 break;
2544
2545 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002546 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002547 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002548 locations->SetOut(Location::RequiresFpuRegister());
2549 break;
2550
Roland Levillaincff13742014-11-17 14:32:17 +00002551 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002552 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002553 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002554 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002555 break;
2556
2557 default:
2558 LOG(FATAL) << "Unexpected type conversion from " << input_type
2559 << " to " << result_type;
2560      }
2561 break;
2562
Roland Levillaindff1f282014-11-05 14:15:05 +00002563 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002564 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002565 case Primitive::kPrimBoolean:
2566 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002567 case Primitive::kPrimByte:
2568 case Primitive::kPrimShort:
2569 case Primitive::kPrimInt:
2570 case Primitive::kPrimChar:
2571 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002572 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002573 locations->SetOut(Location::RequiresFpuRegister());
2574 break;
2575
2576 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002577 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002578 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002579 locations->SetOut(Location::RequiresFpuRegister());
2580 break;
2581
Roland Levillaincff13742014-11-17 14:32:17 +00002582 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002583 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002584 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002585 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002586 break;
2587
2588 default:
2589 LOG(FATAL) << "Unexpected type conversion from " << input_type
2590 << " to " << result_type;
2591 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002592 break;
2593
2594 default:
2595 LOG(FATAL) << "Unexpected type conversion from " << input_type
2596 << " to " << result_type;
2597 }
2598}
2599
2600void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2601 LocationSummary* locations = conversion->GetLocations();
2602 Location out = locations->Out();
2603 Location in = locations->InAt(0);
2604 Primitive::Type result_type = conversion->GetResultType();
2605 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002606 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002607 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002608 case Primitive::kPrimByte:
2609 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002610 case Primitive::kPrimLong:
2611 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002612 case Primitive::kPrimBoolean:
2613 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002614 case Primitive::kPrimShort:
2615 case Primitive::kPrimInt:
2616 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002617 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002618 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002619 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002620 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002621 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002622 Address(CpuRegister(RSP), in.GetStackIndex()));
2623 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002624 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002625 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002626 }
2627 break;
2628
2629 default:
2630 LOG(FATAL) << "Unexpected type conversion from " << input_type
2631 << " to " << result_type;
2632 }
2633 break;
2634
Roland Levillain01a8d712014-11-14 16:27:39 +00002635 case Primitive::kPrimShort:
2636 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002637 case Primitive::kPrimLong:
2638 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002639 case Primitive::kPrimBoolean:
2640 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002641 case Primitive::kPrimByte:
2642 case Primitive::kPrimInt:
2643 case Primitive::kPrimChar:
2644 // Processing a Dex `int-to-short' instruction.
2645 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002646 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002647 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002648 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002649 Address(CpuRegister(RSP), in.GetStackIndex()));
2650 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002651 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002652 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002653 }
2654 break;
2655
2656 default:
2657 LOG(FATAL) << "Unexpected type conversion from " << input_type
2658 << " to " << result_type;
2659 }
2660 break;
2661
Roland Levillain946e1432014-11-11 17:35:19 +00002662 case Primitive::kPrimInt:
2663 switch (input_type) {
2664 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002665 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002666 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002667 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002668 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002669 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002670 Address(CpuRegister(RSP), in.GetStackIndex()));
2671 } else {
2672 DCHECK(in.IsConstant());
2673 DCHECK(in.GetConstant()->IsLongConstant());
2674 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002675 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002676 }
2677 break;
2678
Roland Levillain3f8f9362014-12-02 17:45:01 +00002679 case Primitive::kPrimFloat: {
2680 // Processing a Dex `float-to-int' instruction.
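        // The block below implements the Java semantics of float-to-int:
        // NaN converts to 0, values at or above (float)Integer.MAX_VALUE
        // (including +infinity) saturate to Integer.MAX_VALUE, and the
        // out-of-range negative cases need no extra code because cvttss2si
        // already produces 0x80000000, which is Integer.MIN_VALUE.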
2681 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2682 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002683 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002684
2685 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002686 // if input >= (float)INT_MAX goto done
2687 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002688 __ j(kAboveEqual, &done);
2689 // if input == NaN goto nan
2690 __ j(kUnordered, &nan);
2691 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002692 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002693 __ jmp(&done);
2694 __ Bind(&nan);
2695 // output = 0
2696 __ xorl(output, output);
2697 __ Bind(&done);
2698 break;
2699 }
2700
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002701 case Primitive::kPrimDouble: {
2702 // Processing a Dex `double-to-int' instruction.
2703 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2704 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002705 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002706
2707 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002708 // if input >= (double)INT_MAX goto done
2709 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002710 __ j(kAboveEqual, &done);
2711 // if input == NaN goto nan
2712 __ j(kUnordered, &nan);
2713 // output = double-to-int-truncate(input)
2714 __ cvttsd2si(output, input);
2715 __ jmp(&done);
2716 __ Bind(&nan);
2717 // output = 0
2718 __ xorl(output, output);
2719 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002720 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002721 }
Roland Levillain946e1432014-11-11 17:35:19 +00002722
2723 default:
2724 LOG(FATAL) << "Unexpected type conversion from " << input_type
2725 << " to " << result_type;
2726 }
2727 break;
2728
Roland Levillaindff1f282014-11-05 14:15:05 +00002729 case Primitive::kPrimLong:
2730      DCHECK(out.IsRegister());
2731      switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002732 case Primitive::kPrimBoolean:
2733 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002734 case Primitive::kPrimByte:
2735 case Primitive::kPrimShort:
2736 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002737 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002738 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002739 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002740 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002741 break;
2742
Roland Levillain624279f2014-12-04 11:54:28 +00002743 case Primitive::kPrimFloat: {
2744 // Processing a Dex `float-to-long' instruction.
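        // Same idea as the float-to-int case above: NaN becomes 0, values at
        // or above (float)Long.MAX_VALUE saturate to Long.MAX_VALUE, and the
        // 64-bit cvttss2si already yields 0x8000000000000000
        // (Long.MIN_VALUE) for the remaining out-of-range negative inputs.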
2745 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2746 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002747 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002748
Mark Mendell92e83bf2015-05-07 11:25:03 -04002749 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002750 // if input >= (float)LONG_MAX goto done
2751 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002752 __ j(kAboveEqual, &done);
2753 // if input == NaN goto nan
2754 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002755 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002756 __ cvttss2si(output, input, true);
2757 __ jmp(&done);
2758 __ Bind(&nan);
2759 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002760 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002761 __ Bind(&done);
2762 break;
2763 }
2764
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002765 case Primitive::kPrimDouble: {
2766 // Processing a Dex `double-to-long' instruction.
2767 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2768 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002769 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002770
Mark Mendell92e83bf2015-05-07 11:25:03 -04002771 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002772 // if input >= (double)LONG_MAX goto done
2773 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002774 __ j(kAboveEqual, &done);
2775 // if input == NaN goto nan
2776 __ j(kUnordered, &nan);
2777 // output = double-to-long-truncate(input)
2778 __ cvttsd2si(output, input, true);
2779 __ jmp(&done);
2780 __ Bind(&nan);
2781 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002782 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002783 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002784 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002785 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002786
2787 default:
2788 LOG(FATAL) << "Unexpected type conversion from " << input_type
2789 << " to " << result_type;
2790 }
2791 break;
2792
Roland Levillain981e4542014-11-14 11:47:14 +00002793 case Primitive::kPrimChar:
2794 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002795 case Primitive::kPrimLong:
2796 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002797 case Primitive::kPrimBoolean:
2798 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002799 case Primitive::kPrimByte:
2800 case Primitive::kPrimShort:
2801 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002802 // Processing a Dex `int-to-char' instruction.
2803 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002804 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002805 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002806 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002807 Address(CpuRegister(RSP), in.GetStackIndex()));
2808 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002809 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002810 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002811 }
2812 break;
2813
2814 default:
2815 LOG(FATAL) << "Unexpected type conversion from " << input_type
2816 << " to " << result_type;
2817 }
2818 break;
2819
Roland Levillaindff1f282014-11-05 14:15:05 +00002820 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002821 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002822 case Primitive::kPrimBoolean:
2823 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002824 case Primitive::kPrimByte:
2825 case Primitive::kPrimShort:
2826 case Primitive::kPrimInt:
2827 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002828 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002829 if (in.IsRegister()) {
2830 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2831 } else if (in.IsConstant()) {
2832 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2833 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002834 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002835 } else {
2836 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2837 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2838 }
Roland Levillaincff13742014-11-17 14:32:17 +00002839 break;
2840
2841 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002842 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002843 if (in.IsRegister()) {
2844 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2845 } else if (in.IsConstant()) {
2846 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2847 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002848 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002849 } else {
2850 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2851 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2852 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002853 break;
2854
Roland Levillaincff13742014-11-17 14:32:17 +00002855 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002856 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002857 if (in.IsFpuRegister()) {
2858 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2859 } else if (in.IsConstant()) {
2860 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2861 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002862 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002863 } else {
2864 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2865 Address(CpuRegister(RSP), in.GetStackIndex()));
2866 }
Roland Levillaincff13742014-11-17 14:32:17 +00002867 break;
2868
2869 default:
2870 LOG(FATAL) << "Unexpected type conversion from " << input_type
2871 << " to " << result_type;
2872      }
2873 break;
2874
Roland Levillaindff1f282014-11-05 14:15:05 +00002875 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002876 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002877 case Primitive::kPrimBoolean:
2878 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002879 case Primitive::kPrimByte:
2880 case Primitive::kPrimShort:
2881 case Primitive::kPrimInt:
2882 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002883 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002884 if (in.IsRegister()) {
2885 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2886 } else if (in.IsConstant()) {
2887 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2888 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002889 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002890 } else {
2891 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2892 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2893 }
Roland Levillaincff13742014-11-17 14:32:17 +00002894 break;
2895
2896 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002897 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002898 if (in.IsRegister()) {
2899 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2900 } else if (in.IsConstant()) {
2901 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2902 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002903 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002904 } else {
2905 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2906 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2907 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002908 break;
2909
Roland Levillaincff13742014-11-17 14:32:17 +00002910 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002911 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002912 if (in.IsFpuRegister()) {
2913 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2914 } else if (in.IsConstant()) {
2915 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2916 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002917 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002918 } else {
2919 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2920 Address(CpuRegister(RSP), in.GetStackIndex()));
2921 }
Roland Levillaincff13742014-11-17 14:32:17 +00002922 break;
2923
2924 default:
2925 LOG(FATAL) << "Unexpected type conversion from " << input_type
2926 << " to " << result_type;
2927      }
Roland Levillaindff1f282014-11-05 14:15:05 +00002928 break;
2929
2930 default:
2931 LOG(FATAL) << "Unexpected type conversion from " << input_type
2932 << " to " << result_type;
2933 }
2934}
2935
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002936void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002937 LocationSummary* locations =
2938 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002939 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002940 case Primitive::kPrimInt: {
2941 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002942 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2943 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
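      // kNoOutputOverlap lets the register allocator reuse one of the inputs
      // for the output; VisitAdd below then picks between addl (when the
      // output aliases an input) and a three-operand leal (when it does not),
      // so no extra register move is needed.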
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002944 break;
2945 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002946
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002947 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002948 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002949 // We can use a leaq or addq if the constant can fit in an immediate.
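      // leaq can add a register and a 32-bit displacement into any
      // destination without disturbing the inputs or the flags, so a separate
      // output register costs nothing; when the output aliases the first
      // input, the shorter addq encoding is used instead (see VisitAdd below).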
Mark Mendellea5af682015-10-22 17:35:49 -04002950 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002951 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002952 break;
2953 }
2954
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002955 case Primitive::kPrimDouble:
2956 case Primitive::kPrimFloat: {
2957 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002958 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002959 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002960 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002961 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002962
2963 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002964 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002965 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002966}
2967
2968void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2969 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002970 Location first = locations->InAt(0);
2971 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002972 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002973
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002974 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002975 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002976 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002977 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2978 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002979 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2980 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002981 } else {
2982 __ leal(out.AsRegister<CpuRegister>(), Address(
2983 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2984 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002985 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002986 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2987 __ addl(out.AsRegister<CpuRegister>(),
2988 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2989 } else {
2990 __ leal(out.AsRegister<CpuRegister>(), Address(
2991 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2992 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002993 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002994 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002995 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002996 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002997 break;
2998 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002999
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003000 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05003001 if (second.IsRegister()) {
3002 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3003 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003004 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3005 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05003006 } else {
3007 __ leaq(out.AsRegister<CpuRegister>(), Address(
3008 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
3009 }
3010 } else {
3011 DCHECK(second.IsConstant());
3012 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3013 int32_t int32_value = Low32Bits(value);
3014 DCHECK_EQ(int32_value, value);
3015 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3016 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
3017 } else {
3018 __ leaq(out.AsRegister<CpuRegister>(), Address(
3019 first.AsRegister<CpuRegister>(), int32_value));
3020 }
3021 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003022 break;
3023 }
3024
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003025 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003026 if (second.IsFpuRegister()) {
3027 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3028 } else if (second.IsConstant()) {
3029 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003030 codegen_->LiteralFloatAddress(
3031 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003032 } else {
3033 DCHECK(second.IsStackSlot());
3034 __ addss(first.AsFpuRegister<XmmRegister>(),
3035 Address(CpuRegister(RSP), second.GetStackIndex()));
3036 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003037 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003038 }
3039
3040 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003041 if (second.IsFpuRegister()) {
3042 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3043 } else if (second.IsConstant()) {
3044 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003045 codegen_->LiteralDoubleAddress(
3046 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003047 } else {
3048 DCHECK(second.IsDoubleStackSlot());
3049 __ addsd(first.AsFpuRegister<XmmRegister>(),
3050 Address(CpuRegister(RSP), second.GetStackIndex()));
3051 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003052 break;
3053 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003054
3055 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003056 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003057 }
3058}
3059
3060void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003061 LocationSummary* locations =
3062 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003063 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003064 case Primitive::kPrimInt: {
3065 locations->SetInAt(0, Location::RequiresRegister());
3066 locations->SetInAt(1, Location::Any());
3067 locations->SetOut(Location::SameAsFirstInput());
3068 break;
3069 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003070 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003071 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003072 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003073 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003074 break;
3075 }
Calin Juravle11351682014-10-23 15:38:15 +01003076 case Primitive::kPrimFloat:
3077 case Primitive::kPrimDouble: {
3078 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003079 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003080 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003081 break;
Calin Juravle11351682014-10-23 15:38:15 +01003082 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003083 default:
Calin Juravle11351682014-10-23 15:38:15 +01003084 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003085 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003086}
3087
3088void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3089 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003090 Location first = locations->InAt(0);
3091 Location second = locations->InAt(1);
3092 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003093 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003094 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003095 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003096 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003097 } else if (second.IsConstant()) {
3098 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003099 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003100 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003101 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003102 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003103 break;
3104 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003105 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003106 if (second.IsConstant()) {
3107 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3108 DCHECK(IsInt<32>(value));
3109 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3110 } else {
3111 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3112 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003113 break;
3114 }
3115
Calin Juravle11351682014-10-23 15:38:15 +01003116 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003117 if (second.IsFpuRegister()) {
3118 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3119 } else if (second.IsConstant()) {
3120 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003121 codegen_->LiteralFloatAddress(
3122 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003123 } else {
3124 DCHECK(second.IsStackSlot());
3125 __ subss(first.AsFpuRegister<XmmRegister>(),
3126 Address(CpuRegister(RSP), second.GetStackIndex()));
3127 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003128 break;
Calin Juravle11351682014-10-23 15:38:15 +01003129 }
3130
3131 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003132 if (second.IsFpuRegister()) {
3133 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3134 } else if (second.IsConstant()) {
3135 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003136 codegen_->LiteralDoubleAddress(
3137 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003138 } else {
3139 DCHECK(second.IsDoubleStackSlot());
3140 __ subsd(first.AsFpuRegister<XmmRegister>(),
3141 Address(CpuRegister(RSP), second.GetStackIndex()));
3142 }
Calin Juravle11351682014-10-23 15:38:15 +01003143 break;
3144 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003145
3146 default:
Calin Juravle11351682014-10-23 15:38:15 +01003147 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003148 }
3149}
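// Note on the floating-point cases above (illustrative, not generated code):
// SSE arithmetic instructions such as subss/subsd have no immediate form, so
// when the right-hand side is a constant it is read from a RIP-relative
// constant area via LiteralFloatAddress / LiteralDoubleAddress rather than
// being materialized in a register first. Roughly, `x - 2.5f` with `x` in
// XMM0 becomes:
//   subss xmm0, [rip + <offset of 2.5f in the constant area>]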
3150
Calin Juravle34bacdf2014-10-07 20:23:36 +01003151void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3152 LocationSummary* locations =
3153 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3154 switch (mul->GetResultType()) {
3155 case Primitive::kPrimInt: {
3156 locations->SetInAt(0, Location::RequiresRegister());
3157 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003158 if (mul->InputAt(1)->IsIntConstant()) {
3159 // Can use 3 operand multiply.
3160 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3161 } else {
3162 locations->SetOut(Location::SameAsFirstInput());
3163 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003164 break;
3165 }
3166 case Primitive::kPrimLong: {
3167 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003168 locations->SetInAt(1, Location::Any());
3169 if (mul->InputAt(1)->IsLongConstant() &&
3170 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003171 // Can use 3 operand multiply.
3172 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3173 } else {
3174 locations->SetOut(Location::SameAsFirstInput());
3175 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003176 break;
3177 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003178 case Primitive::kPrimFloat:
3179 case Primitive::kPrimDouble: {
3180 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003181 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003182 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003183 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003184 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003185
3186 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003187 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003188 }
3189}
3190
3191void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3192 LocationSummary* locations = mul->GetLocations();
3193 Location first = locations->InAt(0);
3194 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003195 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003196 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003197 case Primitive::kPrimInt:
3198 // The constant may have ended up in a register, so test explicitly to avoid
3199 // problems where the output may not be the same as the first operand.
3200 if (mul->InputAt(1)->IsIntConstant()) {
3201 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3202 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3203 } else if (second.IsRegister()) {
3204 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003205 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003206 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003207 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003208 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003209 __ imull(first.AsRegister<CpuRegister>(),
3210 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003211 }
3212 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003213 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003214 // The constant may have ended up in a register, so test explicitly to avoid
3215 // problems where the output may not be the same as the first operand.
3216 if (mul->InputAt(1)->IsLongConstant()) {
3217 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3218 if (IsInt<32>(value)) {
3219 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3220 Immediate(static_cast<int32_t>(value)));
3221 } else {
3222 // Have to use the constant area.
3223 DCHECK(first.Equals(out));
3224 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3225 }
3226 } else if (second.IsRegister()) {
3227 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003228 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003229 } else {
3230 DCHECK(second.IsDoubleStackSlot());
3231 DCHECK(first.Equals(out));
3232 __ imulq(first.AsRegister<CpuRegister>(),
3233 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003234 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003235 break;
3236 }
3237
Calin Juravleb5bfa962014-10-21 18:02:24 +01003238 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003239 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003240 if (second.IsFpuRegister()) {
3241 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3242 } else if (second.IsConstant()) {
3243 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003244 codegen_->LiteralFloatAddress(
3245 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003246 } else {
3247 DCHECK(second.IsStackSlot());
3248 __ mulss(first.AsFpuRegister<XmmRegister>(),
3249 Address(CpuRegister(RSP), second.GetStackIndex()));
3250 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003251 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003252 }
3253
3254 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003255 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003256 if (second.IsFpuRegister()) {
3257 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3258 } else if (second.IsConstant()) {
3259 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003260 codegen_->LiteralDoubleAddress(
3261 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003262 } else {
3263 DCHECK(second.IsDoubleStackSlot());
3264 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3265 Address(CpuRegister(RSP), second.GetStackIndex()));
3266 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003267 break;
3268 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003269
3270 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003271 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003272 }
3273}
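// Note on the constant cases above (illustrative, not generated code): a
// constant input may still have been allocated to a register, so the
// HInstruction input is tested rather than the Location. The constant path can
// then use the three-operand form of imul, which is why the output does not
// need to alias the first input there. Roughly, `x * 11` with `x` in RSI and
// the result wanted in RAX can be emitted as
//   imull rax, rsi, 11      // three-operand form, out may differ from first
// while the register/memory forms are two-operand and force out == first:
//   imull rax, rcx          // rax = rax * rcx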
3274
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003275void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3276 uint32_t stack_adjustment, bool is_float) {
3277 if (source.IsStackSlot()) {
3278 DCHECK(is_float);
3279 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3280 } else if (source.IsDoubleStackSlot()) {
3281 DCHECK(!is_float);
3282 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3283 } else {
3284 // Write the value to the temporary location on the stack and load to FP stack.
3285 if (is_float) {
3286 Location stack_temp = Location::StackSlot(temp_offset);
3287 codegen_->Move(stack_temp, source);
3288 __ flds(Address(CpuRegister(RSP), temp_offset));
3289 } else {
3290 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3291 codegen_->Move(stack_temp, source);
3292 __ fldl(Address(CpuRegister(RSP), temp_offset));
3293 }
3294 }
3295}
3296
3297void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3298 Primitive::Type type = rem->GetResultType();
3299 bool is_float = type == Primitive::kPrimFloat;
3300 size_t elem_size = Primitive::ComponentSize(type);
3301 LocationSummary* locations = rem->GetLocations();
3302 Location first = locations->InAt(0);
3303 Location second = locations->InAt(1);
3304 Location out = locations->Out();
3305
3306 // Create stack space for 2 elements.
3307 // TODO: enhance register allocator to ask for stack temporaries.
3308 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3309
3310 // Load the values to the FP stack in reverse order, using temporaries if needed.
3311 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3312 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3313
3314 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003315 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003316 __ Bind(&retry);
3317 __ fprem();
3318
3319 // Move FP status to AX.
3320 __ fstsw();
3321
3322 // And see if the argument reduction is complete. This is signaled by the
3323 // C2 FPU flag bit set to 0.
3324 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3325 __ j(kNotEqual, &retry);
3326
3327 // We have settled on the final value. Retrieve it into an XMM register.
3328 // Store FP top of stack to real stack.
3329 if (is_float) {
3330 __ fsts(Address(CpuRegister(RSP), 0));
3331 } else {
3332 __ fstl(Address(CpuRegister(RSP), 0));
3333 }
3334
3335 // Pop the 2 items from the FP stack.
3336 __ fucompp();
3337
3338 // Load the value from the stack into an XMM register.
3339 DCHECK(out.IsFpuRegister()) << out;
3340 if (is_float) {
3341 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3342 } else {
3343 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3344 }
3345
3346 // And remove the temporary stack space we allocated.
3347 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3348}
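// Worked example for GenerateRemFP above (illustrative, not generated code):
// Java's % on floating-point values uses truncated division, which is what
// x87 fprem computes. For rem(5.5f, 2.0f) the sequence leaves
//   5.5 - trunc(5.5 / 2.0) * 2.0 = 5.5 - 2 * 2.0 = 1.5
// on the FP stack. The retry loop is needed because fprem only performs a
// partial reduction per iteration (signaled through the C2 flag) when the
// operands' exponents are far apart.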
3349
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003350void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3351 DCHECK(instruction->IsDiv() || instruction->IsRem());
3352
3353 LocationSummary* locations = instruction->GetLocations();
3354 Location second = locations->InAt(1);
3355 DCHECK(second.IsConstant());
3356
3357 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3358 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003359 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003360
3361 DCHECK(imm == 1 || imm == -1);
3362
3363 switch (instruction->GetResultType()) {
3364 case Primitive::kPrimInt: {
3365 if (instruction->IsRem()) {
3366 __ xorl(output_register, output_register);
3367 } else {
3368 __ movl(output_register, input_register);
3369 if (imm == -1) {
3370 __ negl(output_register);
3371 }
3372 }
3373 break;
3374 }
3375
3376 case Primitive::kPrimLong: {
3377 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003378 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003379 } else {
3380 __ movq(output_register, input_register);
3381 if (imm == -1) {
3382 __ negq(output_register);
3383 }
3384 }
3385 break;
3386 }
3387
3388 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003389 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003390 }
3391}
3392
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003393void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003394 LocationSummary* locations = instruction->GetLocations();
3395 Location second = locations->InAt(1);
3396
3397 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3398 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3399
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003400 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003401 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3402 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003403
3404 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3405
3406 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003407 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003408 __ testl(numerator, numerator);
3409 __ cmov(kGreaterEqual, tmp, numerator);
3410 int shift = CTZ(imm);
3411 __ sarl(tmp, Immediate(shift));
3412
3413 if (imm < 0) {
3414 __ negl(tmp);
3415 }
3416
3417 __ movl(output_register, tmp);
3418 } else {
3419 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3420 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3421
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003422 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003423 __ addq(rdx, numerator);
3424 __ testq(numerator, numerator);
3425 __ cmov(kGreaterEqual, rdx, numerator);
3426 int shift = CTZ(imm);
3427 __ sarq(rdx, Immediate(shift));
3428
3429 if (imm < 0) {
3430 __ negq(rdx);
3431 }
3432
3433 __ movq(output_register, rdx);
3434 }
3435}
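// Reference sketch of the rounding logic above (illustrative only; nothing in
// the code generator calls it, hence ATTRIBUTE_UNUSED). Signed division must
// round toward zero, so negative numerators are biased by (abs_imm - 1) before
// the arithmetic shift. The sketch assumes a 32-bit numerator, a divisor whose
// magnitude is a power of two, and that >> of a negative value behaves as an
// arithmetic shift, mirroring sarl.
static ATTRIBUTE_UNUSED int32_t Int32DivByPowerOfTwoSketch(int32_t numerator, int32_t imm) {
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
  int shift = CTZ(abs_imm);
  // Bias negative numerators so that, e.g., -7 / 4 yields -1 rather than -2.
  int64_t biased =
      (numerator < 0) ? static_cast<int64_t>(numerator) + abs_imm - 1 : numerator;
  int32_t quotient = static_cast<int32_t>(biased >> shift);
  return (imm < 0) ? -quotient : quotient;
}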
3436
3437void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3438 DCHECK(instruction->IsDiv() || instruction->IsRem());
3439
3440 LocationSummary* locations = instruction->GetLocations();
3441 Location second = locations->InAt(1);
3442
3443 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3444 : locations->GetTemp(0).AsRegister<CpuRegister>();
3445 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3446 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3447 : locations->Out().AsRegister<CpuRegister>();
3448 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3449
3450 DCHECK_EQ(RAX, eax.AsRegister());
3451 DCHECK_EQ(RDX, edx.AsRegister());
3452 if (instruction->IsDiv()) {
3453 DCHECK_EQ(RAX, out.AsRegister());
3454 } else {
3455 DCHECK_EQ(RDX, out.AsRegister());
3456 }
3457
3458 int64_t magic;
3459 int shift;
3460
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003461 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003462 if (instruction->GetResultType() == Primitive::kPrimInt) {
3463 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3464
3465 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3466
3467 __ movl(numerator, eax);
3468
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003469 __ movl(eax, Immediate(magic));
3470 __ imull(numerator);
3471
3472 if (imm > 0 && magic < 0) {
3473 __ addl(edx, numerator);
3474 } else if (imm < 0 && magic > 0) {
3475 __ subl(edx, numerator);
3476 }
3477
3478 if (shift != 0) {
3479 __ sarl(edx, Immediate(shift));
3480 }
3481
3482 __ movl(eax, edx);
3483 __ shrl(edx, Immediate(31));
3484 __ addl(edx, eax);
3485
3486 if (instruction->IsRem()) {
3487 __ movl(eax, numerator);
3488 __ imull(edx, Immediate(imm));
3489 __ subl(eax, edx);
3490 __ movl(edx, eax);
3491 } else {
3492 __ movl(eax, edx);
3493 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003494 } else {
3495 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3496
3497 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3498
3499 CpuRegister rax = eax;
3500 CpuRegister rdx = edx;
3501
3502 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3503
3504 // Save the numerator.
3505 __ movq(numerator, rax);
3506
3507 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003508 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003509
3510 // RDX:RAX = magic * numerator
3511 __ imulq(numerator);
3512
3513 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003514 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003515 __ addq(rdx, numerator);
3516 } else if (imm < 0 && magic > 0) {
3517 // RDX -= numerator
3518 __ subq(rdx, numerator);
3519 }
3520
3521 // Shift if needed.
3522 if (shift != 0) {
3523 __ sarq(rdx, Immediate(shift));
3524 }
3525
3526 // RDX += 1 if RDX < 0
3527 __ movq(rax, rdx);
3528 __ shrq(rdx, Immediate(63));
3529 __ addq(rdx, rax);
3530
3531 if (instruction->IsRem()) {
3532 __ movq(rax, numerator);
3533
3534 if (IsInt<32>(imm)) {
3535 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3536 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003537 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003538 }
3539
3540 __ subq(rax, rdx);
3541 __ movq(rdx, rax);
3542 } else {
3543 __ movq(rax, rdx);
3544 }
3545 }
3546}
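// Reference sketch of the 32-bit path above (illustrative only; nothing in the
// code generator calls it, hence ATTRIBUTE_UNUSED). The "magic number"
// technique from Hacker's Delight turns division by an arbitrary constant into
// a widening multiply, an optional corrective add/subtract, an arithmetic
// shift and a sign-bit fix-up. The sketch assumes `magic` and `shift` are the
// pair produced by CalculateMagicAndShiftForDivRem for the divisor `imm`, and
// that >> of a negative value is an arithmetic shift, mirroring sarl.
static ATTRIBUTE_UNUSED int32_t Int32DivByConstantSketch(int32_t numerator,
                                                         int32_t imm,
                                                         int32_t magic,
                                                         int shift) {
  // High 32 bits of the signed 64-bit product, as imull leaves them in EDX.
  int32_t high = static_cast<int32_t>(
      (static_cast<int64_t>(magic) * static_cast<int64_t>(numerator)) >> 32);
  // Corrective step when the magic constant and the divisor differ in sign.
  if (imm > 0 && magic < 0) {
    high += numerator;
  } else if (imm < 0 && magic > 0) {
    high -= numerator;
  }
  if (shift != 0) {
    high >>= shift;
  }
  // Adding the sign bit rounds the quotient toward zero; a remainder is then
  // obtained as numerator - quotient * imm, which is what the IsRem() block
  // above computes.
  return high + static_cast<int32_t>(static_cast<uint32_t>(high) >> 31);
}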
3547
Calin Juravlebacfec32014-11-14 15:54:36 +00003548void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3549 DCHECK(instruction->IsDiv() || instruction->IsRem());
3550 Primitive::Type type = instruction->GetResultType();
3551 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3552
3553 bool is_div = instruction->IsDiv();
3554 LocationSummary* locations = instruction->GetLocations();
3555
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003556 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3557 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003558
Roland Levillain271ab9c2014-11-27 15:23:57 +00003559 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003560 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003561
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003562 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003563 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003564
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003565 if (imm == 0) {
3566 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3567 } else if (imm == 1 || imm == -1) {
3568 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003569 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003570 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003571 } else {
3572 DCHECK(imm <= -2 || imm >= 2);
3573 GenerateDivRemWithAnyConstant(instruction);
3574 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003575 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003576 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003577 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003578 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003579 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003580
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003581 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3582 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3583 // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000)
3584 // so it's safe to just use negl instead of more complex comparisons.
3585 if (type == Primitive::kPrimInt) {
3586 __ cmpl(second_reg, Immediate(-1));
3587 __ j(kEqual, slow_path->GetEntryLabel());
3588 // edx:eax <- sign-extended of eax
3589 __ cdq();
3590 // eax = quotient, edx = remainder
3591 __ idivl(second_reg);
3592 } else {
3593 __ cmpq(second_reg, Immediate(-1));
3594 __ j(kEqual, slow_path->GetEntryLabel());
3595 // rdx:rax <- sign-extended of rax
3596 __ cqo();
3597 // rax = quotient, rdx = remainder
3598 __ idivq(second_reg);
3599 }
3600 __ Bind(slow_path->GetExitLabel());
3601 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003602}
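// Note on the -1 check above (illustrative, not generated code): idivl/idivq
// raise #DE not only for a zero divisor but also for the single overflowing
// quotient, INT32_MIN / -1 (resp. INT64_MIN / -1), whose mathematical result
// +2^31 (resp. +2^63) is not representable. Java instead defines
//   INT32_MIN / -1 == INT32_MIN   and   INT32_MIN % -1 == 0,
// which is what the DivRemMinusOneSlowPathX86_64 slow path is there to
// produce, mirroring the constant case handled by DivRemOneOrMinusOne.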
3603
Calin Juravle7c4954d2014-10-28 16:57:40 +00003604void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3605 LocationSummary* locations =
3606 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3607 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003608 case Primitive::kPrimInt:
3609 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003610 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003611 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003612 locations->SetOut(Location::SameAsFirstInput());
3613 // Intel uses edx:eax as the dividend.
3614 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003615 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3616 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3617 // output and request another temp.
3618 if (div->InputAt(1)->IsConstant()) {
3619 locations->AddTemp(Location::RequiresRegister());
3620 }
Calin Juravled0d48522014-11-04 16:40:20 +00003621 break;
3622 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003623
Calin Juravle7c4954d2014-10-28 16:57:40 +00003624 case Primitive::kPrimFloat:
3625 case Primitive::kPrimDouble: {
3626 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003627 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003628 locations->SetOut(Location::SameAsFirstInput());
3629 break;
3630 }
3631
3632 default:
3633 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3634 }
3635}
3636
3637void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3638 LocationSummary* locations = div->GetLocations();
3639 Location first = locations->InAt(0);
3640 Location second = locations->InAt(1);
3641 DCHECK(first.Equals(locations->Out()));
3642
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003643 Primitive::Type type = div->GetResultType();
3644 switch (type) {
3645 case Primitive::kPrimInt:
3646 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003647 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003648 break;
3649 }
3650
Calin Juravle7c4954d2014-10-28 16:57:40 +00003651 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003652 if (second.IsFpuRegister()) {
3653 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3654 } else if (second.IsConstant()) {
3655 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003656 codegen_->LiteralFloatAddress(
3657 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003658 } else {
3659 DCHECK(second.IsStackSlot());
3660 __ divss(first.AsFpuRegister<XmmRegister>(),
3661 Address(CpuRegister(RSP), second.GetStackIndex()));
3662 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003663 break;
3664 }
3665
3666 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003667 if (second.IsFpuRegister()) {
3668 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3669 } else if (second.IsConstant()) {
3670 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003671 codegen_->LiteralDoubleAddress(
3672 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003673 } else {
3674 DCHECK(second.IsDoubleStackSlot());
3675 __ divsd(first.AsFpuRegister<XmmRegister>(),
3676 Address(CpuRegister(RSP), second.GetStackIndex()));
3677 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003678 break;
3679 }
3680
3681 default:
3682 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3683 }
3684}
3685
Calin Juravlebacfec32014-11-14 15:54:36 +00003686void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003687 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003688 LocationSummary* locations =
3689 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003690
3691 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003692 case Primitive::kPrimInt:
3693 case Primitive::kPrimLong: {
3694 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003695 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003696 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3697 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003698 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3699 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3700 // output and request another temp.
3701 if (rem->InputAt(1)->IsConstant()) {
3702 locations->AddTemp(Location::RequiresRegister());
3703 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003704 break;
3705 }
3706
3707 case Primitive::kPrimFloat:
3708 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003709 locations->SetInAt(0, Location::Any());
3710 locations->SetInAt(1, Location::Any());
3711 locations->SetOut(Location::RequiresFpuRegister());
3712 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003713 break;
3714 }
3715
3716 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003717 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003718 }
3719}
3720
3721void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3722 Primitive::Type type = rem->GetResultType();
3723 switch (type) {
3724 case Primitive::kPrimInt:
3725 case Primitive::kPrimLong: {
3726 GenerateDivRemIntegral(rem);
3727 break;
3728 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003729 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003730 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003731 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003732 break;
3733 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003734 default:
3735 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3736 }
3737}
3738
Calin Juravled0d48522014-11-04 16:40:20 +00003739void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003740 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3741 ? LocationSummary::kCallOnSlowPath
3742 : LocationSummary::kNoCall;
3743 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003744 locations->SetInAt(0, Location::Any());
3745 if (instruction->HasUses()) {
3746 locations->SetOut(Location::SameAsFirstInput());
3747 }
3748}
3749
3750void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003751 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003752 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3753 codegen_->AddSlowPath(slow_path);
3754
3755 LocationSummary* locations = instruction->GetLocations();
3756 Location value = locations->InAt(0);
3757
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003758 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003759 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003760 case Primitive::kPrimByte:
3761 case Primitive::kPrimChar:
3762 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003763 case Primitive::kPrimInt: {
3764 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003765 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003766 __ j(kEqual, slow_path->GetEntryLabel());
3767 } else if (value.IsStackSlot()) {
3768 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3769 __ j(kEqual, slow_path->GetEntryLabel());
3770 } else {
3771 DCHECK(value.IsConstant()) << value;
3772 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3773 __ jmp(slow_path->GetEntryLabel());
3774 }
3775 }
3776 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003777 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003778 case Primitive::kPrimLong: {
3779 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003780 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003781 __ j(kEqual, slow_path->GetEntryLabel());
3782 } else if (value.IsDoubleStackSlot()) {
3783 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3784 __ j(kEqual, slow_path->GetEntryLabel());
3785 } else {
3786 DCHECK(value.IsConstant()) << value;
3787 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3788 __ jmp(slow_path->GetEntryLabel());
3789 }
3790 }
3791 break;
3792 }
3793 default:
3794 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003795 }
Calin Juravled0d48522014-11-04 16:40:20 +00003796}
3797
Calin Juravle9aec02f2014-11-18 23:06:35 +00003798void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3799 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3800
3801 LocationSummary* locations =
3802 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3803
3804 switch (op->GetResultType()) {
3805 case Primitive::kPrimInt:
3806 case Primitive::kPrimLong: {
3807 locations->SetInAt(0, Location::RequiresRegister());
3808 // The shift count needs to be in CL.
3809 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3810 locations->SetOut(Location::SameAsFirstInput());
3811 break;
3812 }
3813 default:
3814 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3815 }
3816}
3817
3818void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3819 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3820
3821 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003822 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003823 Location second = locations->InAt(1);
3824
3825 switch (op->GetResultType()) {
3826 case Primitive::kPrimInt: {
3827 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003828 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003829 if (op->IsShl()) {
3830 __ shll(first_reg, second_reg);
3831 } else if (op->IsShr()) {
3832 __ sarl(first_reg, second_reg);
3833 } else {
3834 __ shrl(first_reg, second_reg);
3835 }
3836 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003837 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003838 if (op->IsShl()) {
3839 __ shll(first_reg, imm);
3840 } else if (op->IsShr()) {
3841 __ sarl(first_reg, imm);
3842 } else {
3843 __ shrl(first_reg, imm);
3844 }
3845 }
3846 break;
3847 }
3848 case Primitive::kPrimLong: {
3849 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003850 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003851 if (op->IsShl()) {
3852 __ shlq(first_reg, second_reg);
3853 } else if (op->IsShr()) {
3854 __ sarq(first_reg, second_reg);
3855 } else {
3856 __ shrq(first_reg, second_reg);
3857 }
3858 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003859 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003860 if (op->IsShl()) {
3861 __ shlq(first_reg, imm);
3862 } else if (op->IsShr()) {
3863 __ sarq(first_reg, imm);
3864 } else {
3865 __ shrq(first_reg, imm);
3866 }
3867 }
3868 break;
3869 }
3870 default:
3871 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003872 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003873 }
3874}
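// Note on the shift distances above (illustrative, not generated code): no
// explicit masking is emitted for the register case because the hardware uses
// only the low 5 bits of CL for 32-bit shifts and the low 6 bits for 64-bit
// shifts, which matches the Java semantics applied to the constant case via
// kMaxIntShiftDistance / kMaxLongShiftDistance. For ints, for example,
//   x << 35  ==  x << (35 & 31)  ==  x << 3.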
3875
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003876void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3877 LocationSummary* locations =
3878 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3879
3880 switch (ror->GetResultType()) {
3881 case Primitive::kPrimInt:
3882 case Primitive::kPrimLong: {
3883 locations->SetInAt(0, Location::RequiresRegister());
3884 // The shift count needs to be in CL (unless it is a constant).
3885 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3886 locations->SetOut(Location::SameAsFirstInput());
3887 break;
3888 }
3889 default:
3890 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3891 UNREACHABLE();
3892 }
3893}
3894
3895void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3896 LocationSummary* locations = ror->GetLocations();
3897 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3898 Location second = locations->InAt(1);
3899
3900 switch (ror->GetResultType()) {
3901 case Primitive::kPrimInt:
3902 if (second.IsRegister()) {
3903 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3904 __ rorl(first_reg, second_reg);
3905 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003906 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003907 __ rorl(first_reg, imm);
3908 }
3909 break;
3910 case Primitive::kPrimLong:
3911 if (second.IsRegister()) {
3912 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3913 __ rorq(first_reg, second_reg);
3914 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003915 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003916 __ rorq(first_reg, imm);
3917 }
3918 break;
3919 default:
3920 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3921 UNREACHABLE();
3922 }
3923}
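// Note on the rotates above (illustrative, not generated code): rorl/rorq map
// directly onto Integer.rotateRight / Long.rotateRight, i.e. for ints
//   rotateRight(x, d) == (x >>> d) | (x << (32 - d))   for 0 < d < 32,
// with the hardware again reducing a register distance modulo 32 (or 64).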
3924
Calin Juravle9aec02f2014-11-18 23:06:35 +00003925void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3926 HandleShift(shl);
3927}
3928
3929void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3930 HandleShift(shl);
3931}
3932
3933void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3934 HandleShift(shr);
3935}
3936
3937void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3938 HandleShift(shr);
3939}
3940
3941void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3942 HandleShift(ushr);
3943}
3944
3945void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3946 HandleShift(ushr);
3947}
3948
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003949void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003950 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003951 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003952 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003953 if (instruction->IsStringAlloc()) {
3954 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3955 } else {
3956 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3957 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3958 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003959 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003960}
3961
3962void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003963 // Note: if heap poisoning is enabled, the entry point takes care
3964 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003965 if (instruction->IsStringAlloc()) {
3966 // String is allocated through StringFactory. Call NewEmptyString entry point.
3967 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3968 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3969 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3970 __ call(Address(temp, code_offset.SizeValue()));
3971 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3972 } else {
3973 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3974 instruction,
3975 instruction->GetDexPc(),
3976 nullptr);
3977 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3978 DCHECK(!codegen_->IsLeafMethod());
3979 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003980}
3981
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003982void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3983 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003984 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003985 InvokeRuntimeCallingConvention calling_convention;
3986 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003987 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003988 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003989 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003990}
3991
3992void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3993 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003994 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3995 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003996 // Note: if heap poisoning is enabled, the entry point takes care
3997 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003998 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3999 instruction,
4000 instruction->GetDexPc(),
4001 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004002 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004003
4004 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004005}
4006
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004007void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004008 LocationSummary* locations =
4009 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004010 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4011 if (location.IsStackSlot()) {
4012 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4013 } else if (location.IsDoubleStackSlot()) {
4014 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4015 }
4016 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004017}
4018
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004019void InstructionCodeGeneratorX86_64::VisitParameterValue(
4020 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004021 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004022}
4023
4024void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4025 LocationSummary* locations =
4026 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4027 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4028}
4029
4030void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
4031 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4032 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004033}
4034
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004035void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4036 LocationSummary* locations =
4037 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4038 locations->SetInAt(0, Location::RequiresRegister());
4039 locations->SetOut(Location::RequiresRegister());
4040}
4041
4042void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4043 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00004044 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004045 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004046 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004047 __ movq(locations->Out().AsRegister<CpuRegister>(),
4048 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004049 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004050 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004051 instruction->GetIndex(), kX86_64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004052 __ movq(locations->Out().AsRegister<CpuRegister>(),
4053 Address(locations->InAt(0).AsRegister<CpuRegister>(),
4054 mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004055 __ movq(locations->Out().AsRegister<CpuRegister>(),
4056 Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004057 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004058}
4059
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004060void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004061 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004062 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004063 locations->SetInAt(0, Location::RequiresRegister());
4064 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004065}
4066
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004067void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4068 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004069 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4070 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004071 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004072 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004073 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004074 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004075 break;
4076
4077 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004078 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004079 break;
4080
4081 default:
4082 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4083 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004084}
4085
David Brazdil66d126e2015-04-03 16:02:44 +01004086void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4087 LocationSummary* locations =
4088 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4089 locations->SetInAt(0, Location::RequiresRegister());
4090 locations->SetOut(Location::SameAsFirstInput());
4091}
4092
4093void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004094 LocationSummary* locations = bool_not->GetLocations();
4095 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4096 locations->Out().AsRegister<CpuRegister>().AsRegister());
4097 Location out = locations->Out();
4098 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4099}
4100
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004101void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004102 LocationSummary* locations =
4103 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004104 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004105 locations->SetInAt(i, Location::Any());
4106 }
4107 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004108}
4109
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004110void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004111 LOG(FATAL) << "Unimplemented";
4112}
4113
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004114void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004115 /*
4116 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004117 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004118 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4119 */
4120 switch (kind) {
4121 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004122 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004123 break;
4124 }
4125 case MemBarrierKind::kAnyStore:
4126 case MemBarrierKind::kLoadAny:
4127 case MemBarrierKind::kStoreStore: {
4128 // nop
4129 break;
4130 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004131 case MemBarrierKind::kNTStoreStore:
4132 // Non-Temporal Store/Store needs an explicit fence.
4133 MemoryFence(/* non-temporal */ true);
4134 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004135 }
4136}
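// Note on the barrier kinds above (illustrative, not generated code): under
// the x86-64 TSO memory model only store-load reordering is observable, so
// kAnyAny is the only kind that requires a real fence. kNTStoreStore is the
// exception to "stores are already ordered" because non-temporal (streaming)
// stores are weakly ordered and must be fenced explicitly before being
// published.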
4137
4138void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4139 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4140
Roland Levillain0d5a2812015-11-13 10:07:31 +00004141 bool object_field_get_with_read_barrier =
4142 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004143 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004144 new (GetGraph()->GetArena()) LocationSummary(instruction,
4145 object_field_get_with_read_barrier ?
4146 LocationSummary::kCallOnSlowPath :
4147 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004148 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004149 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4150 locations->SetOut(Location::RequiresFpuRegister());
4151 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004152 // The output overlaps for an object field get when read barriers
4153 // are enabled: we do not want the move to overwrite the object's
4154 // location, as we need it to emit the read barrier.
4155 locations->SetOut(
4156 Location::RequiresRegister(),
4157 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004158 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004159 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4160 // We need a temporary register for the read barrier marking slow
4161 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4162 locations->AddTemp(Location::RequiresRegister());
4163 }
Calin Juravle52c48962014-12-16 17:02:57 +00004164}
4165
4166void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4167 const FieldInfo& field_info) {
4168 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4169
4170 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004171 Location base_loc = locations->InAt(0);
4172 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004173 Location out = locations->Out();
4174 bool is_volatile = field_info.IsVolatile();
4175 Primitive::Type field_type = field_info.GetFieldType();
4176 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4177
4178 switch (field_type) {
4179 case Primitive::kPrimBoolean: {
4180 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4181 break;
4182 }
4183
4184 case Primitive::kPrimByte: {
4185 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4186 break;
4187 }
4188
4189 case Primitive::kPrimShort: {
4190 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4191 break;
4192 }
4193
4194 case Primitive::kPrimChar: {
4195 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4196 break;
4197 }
4198
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004199 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004200 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4201 break;
4202 }
4203
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004204 case Primitive::kPrimNot: {
4205 // /* HeapReference<Object> */ out = *(base + offset)
4206 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4207 Location temp_loc = locations->GetTemp(0);
4208 // Note that a potential implicit null check is handled in this
4209 // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4210 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4211 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4212 if (is_volatile) {
4213 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4214 }
4215 } else {
4216 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4217 codegen_->MaybeRecordImplicitNullCheck(instruction);
4218 if (is_volatile) {
4219 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4220 }
4221 // If read barriers are enabled, emit read barriers other than
4222 // Baker's using a slow path (and also unpoison the loaded
4223 // reference, if heap poisoning is enabled).
4224 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4225 }
4226 break;
4227 }
4228
Calin Juravle52c48962014-12-16 17:02:57 +00004229 case Primitive::kPrimLong: {
4230 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4231 break;
4232 }
4233
4234 case Primitive::kPrimFloat: {
4235 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4236 break;
4237 }
4238
4239 case Primitive::kPrimDouble: {
4240 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4241 break;
4242 }
4243
4244 case Primitive::kPrimVoid:
4245 LOG(FATAL) << "Unreachable type " << field_type;
4246 UNREACHABLE();
4247 }
4248
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004249 if (field_type == Primitive::kPrimNot) {
4250 // Potential implicit null checks, in the case of reference
4251 // fields, are handled in the previous switch statement.
4252 } else {
4253 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004254 }
Roland Levillain4d027112015-07-01 15:41:14 +01004255
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004256 if (is_volatile) {
4257 if (field_type == Primitive::kPrimNot) {
4258 // Memory barriers, in the case of references, are also handled
4259 // in the previous switch statement.
4260 } else {
4261 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4262 }
Roland Levillain4d027112015-07-01 15:41:14 +01004263 }
Calin Juravle52c48962014-12-16 17:02:57 +00004264}
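// Note on the volatile handling above (illustrative, not generated code): on
// x86-64 an ordinary load already has acquire semantics, so the
// MemBarrierKind::kLoadAny barrier emitted after a volatile load expands to no
// instruction and only constrains scheduling; the reference case issues it
// inside the switch so that the barrier still follows the load whichever
// read-barrier path was taken.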
4265
4266void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4267 const FieldInfo& field_info) {
4268 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4269
4270 LocationSummary* locations =
4271 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004272 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004273 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004274 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004275 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004276
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004277 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004278 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004279 if (is_volatile) {
4280 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4281 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4282 } else {
4283 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4284 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004285 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004286 if (is_volatile) {
4287 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4288 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4289 } else {
4290 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4291 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004292 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004293 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004294 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004295 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004296 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004297 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4298 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004299 locations->AddTemp(Location::RequiresRegister());
4300 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004301}
4302
Calin Juravle52c48962014-12-16 17:02:57 +00004303void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004304 const FieldInfo& field_info,
4305 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004306 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4307
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004308 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004309 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4310 Location value = locations->InAt(1);
4311 bool is_volatile = field_info.IsVolatile();
4312 Primitive::Type field_type = field_info.GetFieldType();
4313 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4314
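  // Volatile stores follow the JSR-133 cookbook: a kAnyStore barrier before the
  // store and a kAnyAny barrier after it. Under x86-64's TSO memory model the
  // kAnyStore barrier is expected to compile to nothing; only kAnyAny needs a
  // real fence.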
4315 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004316 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004317 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004318
Mark Mendellea5af682015-10-22 17:35:49 -04004319 bool maybe_record_implicit_null_check_done = false;
4320
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004321 switch (field_type) {
4322 case Primitive::kPrimBoolean:
4323 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004324 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004325 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004326 __ movb(Address(base, offset), Immediate(v));
4327 } else {
4328 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4329 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004330 break;
4331 }
4332
4333 case Primitive::kPrimShort:
4334 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004335 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004336 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004337 __ movw(Address(base, offset), Immediate(v));
4338 } else {
4339 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4340 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004341 break;
4342 }
4343
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004344 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004345 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004346 if (value.IsConstant()) {
4347 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004348 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4349 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4350 // Note: if heap poisoning is enabled, no need to poison
4351 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004352 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004353 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004354 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4355 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4356 __ movl(temp, value.AsRegister<CpuRegister>());
4357 __ PoisonHeapReference(temp);
4358 __ movl(Address(base, offset), temp);
4359 } else {
4360 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4361 }
Mark Mendell40741f32015-04-20 22:10:34 -04004362 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004363 break;
4364 }
4365
4366 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004367 if (value.IsConstant()) {
4368 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
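        // A 64-bit immediate cannot be encoded directly in a store, so
        // MoveInt64ToAddress either emits one movq with a sign-extended 32-bit
        // immediate or splits the value into two 32-bit stores; it records the
        // implicit null check at the first store itself, hence the flag set below.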
Mark Mendellea5af682015-10-22 17:35:49 -04004369 codegen_->MoveInt64ToAddress(Address(base, offset),
4370 Address(base, offset + sizeof(int32_t)),
4371 v,
4372 instruction);
4373 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004374 } else {
4375 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4376 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004377 break;
4378 }
4379
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004380 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004381 if (value.IsConstant()) {
4382 int32_t v =
4383 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4384 __ movl(Address(base, offset), Immediate(v));
4385 } else {
4386 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4387 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004388 break;
4389 }
4390
4391 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004392 if (value.IsConstant()) {
4393 int64_t v =
4394 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4395 codegen_->MoveInt64ToAddress(Address(base, offset),
4396 Address(base, offset + sizeof(int32_t)),
4397 v,
4398 instruction);
4399 maybe_record_implicit_null_check_done = true;
4400 } else {
4401 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4402 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004403 break;
4404 }
4405
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004406 case Primitive::kPrimVoid:
4407 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004408 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004409 }
Calin Juravle52c48962014-12-16 17:02:57 +00004410
Mark Mendellea5af682015-10-22 17:35:49 -04004411 if (!maybe_record_implicit_null_check_done) {
4412 codegen_->MaybeRecordImplicitNullCheck(instruction);
4413 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004414
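  // A reference just stored into the heap must dirty the card covering `base`
  // so the garbage collector revisits this object when scanning dirty cards.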
4415 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4416 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4417 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004418 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004419 }
4420
Calin Juravle52c48962014-12-16 17:02:57 +00004421 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004422 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004423 }
4424}
4425
4426void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4427 HandleFieldSet(instruction, instruction->GetFieldInfo());
4428}
4429
4430void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004431 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004432}
4433
4434void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004435 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004436}
4437
4438void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004439 HandleFieldGet(instruction, instruction->GetFieldInfo());
4440}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004441
Calin Juravle52c48962014-12-16 17:02:57 +00004442void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4443 HandleFieldGet(instruction);
4444}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004445
Calin Juravle52c48962014-12-16 17:02:57 +00004446void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4447 HandleFieldGet(instruction, instruction->GetFieldInfo());
4448}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004449
Calin Juravle52c48962014-12-16 17:02:57 +00004450void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4451 HandleFieldSet(instruction, instruction->GetFieldInfo());
4452}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004453
Calin Juravle52c48962014-12-16 17:02:57 +00004454void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004455 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004456}
4457
Calin Juravlee460d1d2015-09-29 04:52:17 +01004458void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4459 HUnresolvedInstanceFieldGet* instruction) {
4460 FieldAccessCallingConventionX86_64 calling_convention;
4461 codegen_->CreateUnresolvedFieldLocationSummary(
4462 instruction, instruction->GetFieldType(), calling_convention);
4463}
4464
4465void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4466 HUnresolvedInstanceFieldGet* instruction) {
4467 FieldAccessCallingConventionX86_64 calling_convention;
4468 codegen_->GenerateUnresolvedFieldAccess(instruction,
4469 instruction->GetFieldType(),
4470 instruction->GetFieldIndex(),
4471 instruction->GetDexPc(),
4472 calling_convention);
4473}
4474
4475void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4476 HUnresolvedInstanceFieldSet* instruction) {
4477 FieldAccessCallingConventionX86_64 calling_convention;
4478 codegen_->CreateUnresolvedFieldLocationSummary(
4479 instruction, instruction->GetFieldType(), calling_convention);
4480}
4481
4482void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4483 HUnresolvedInstanceFieldSet* instruction) {
4484 FieldAccessCallingConventionX86_64 calling_convention;
4485 codegen_->GenerateUnresolvedFieldAccess(instruction,
4486 instruction->GetFieldType(),
4487 instruction->GetFieldIndex(),
4488 instruction->GetDexPc(),
4489 calling_convention);
4490}
4491
4492void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4493 HUnresolvedStaticFieldGet* instruction) {
4494 FieldAccessCallingConventionX86_64 calling_convention;
4495 codegen_->CreateUnresolvedFieldLocationSummary(
4496 instruction, instruction->GetFieldType(), calling_convention);
4497}
4498
4499void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4500 HUnresolvedStaticFieldGet* instruction) {
4501 FieldAccessCallingConventionX86_64 calling_convention;
4502 codegen_->GenerateUnresolvedFieldAccess(instruction,
4503 instruction->GetFieldType(),
4504 instruction->GetFieldIndex(),
4505 instruction->GetDexPc(),
4506 calling_convention);
4507}
4508
4509void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4510 HUnresolvedStaticFieldSet* instruction) {
4511 FieldAccessCallingConventionX86_64 calling_convention;
4512 codegen_->CreateUnresolvedFieldLocationSummary(
4513 instruction, instruction->GetFieldType(), calling_convention);
4514}
4515
4516void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4517 HUnresolvedStaticFieldSet* instruction) {
4518 FieldAccessCallingConventionX86_64 calling_convention;
4519 codegen_->GenerateUnresolvedFieldAccess(instruction,
4520 instruction->GetFieldType(),
4521 instruction->GetFieldIndex(),
4522 instruction->GetDexPc(),
4523 calling_convention);
4524}
4525
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004526void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004527 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4528 ? LocationSummary::kCallOnSlowPath
4529 : LocationSummary::kNoCall;
4530 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4531 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004532 ? Location::RequiresRegister()
4533 : Location::Any();
4534 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004535 if (instruction->HasUses()) {
4536 locations->SetOut(Location::SameAsFirstInput());
4537 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004538}
4539
Calin Juravle2ae48182016-03-16 14:05:09 +00004540void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4541 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004542 return;
4543 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004544 LocationSummary* locations = instruction->GetLocations();
4545 Location obj = locations->InAt(0);
4546
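  // Implicit null check: a memory read from [obj + 0] faults if obj is null.
  // RAX is an arbitrary destination since the result and flags are unused; the
  // runtime's fault handler uses the PC recorded below to turn the resulting
  // SIGSEGV into a NullPointerException.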
4547 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004548 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004549}
4550
Calin Juravle2ae48182016-03-16 14:05:09 +00004551void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004552 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004553 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004554
4555 LocationSummary* locations = instruction->GetLocations();
4556 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004557
4558 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004559 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004560 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004561 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004562 } else {
4563 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004564 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004565 __ jmp(slow_path->GetEntryLabel());
4566 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004567 }
4568 __ j(kEqual, slow_path->GetEntryLabel());
4569}
4570
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004571void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004572 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004573}
4574
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004575void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004576 bool object_array_get_with_read_barrier =
4577 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004578 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004579 new (GetGraph()->GetArena()) LocationSummary(instruction,
4580 object_array_get_with_read_barrier ?
4581 LocationSummary::kCallOnSlowPath :
4582 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004583 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004584 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004585 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4586 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4587 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004588 // The output overlaps for an object array get when read barriers
4589 // are enabled: we do not want the move to overwrite the array's
4590 // location, as we need it to emit the read barrier.
4591 locations->SetOut(
4592 Location::RequiresRegister(),
4593 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004594 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004595 // We need a temporary register for the read barrier marking slow
4596 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4597 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4598 locations->AddTemp(Location::RequiresRegister());
4599 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004600}
4601
4602void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4603 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004604 Location obj_loc = locations->InAt(0);
4605 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004606 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004607 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004608 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004609
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004610 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004611 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004612 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004613 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004614 if (index.IsConstant()) {
4615 __ movzxb(out, Address(obj,
4616 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4617 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004618 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004619 }
4620 break;
4621 }
4622
4623 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004624 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004625 if (index.IsConstant()) {
4626 __ movsxb(out, Address(obj,
4627 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4628 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004629 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004630 }
4631 break;
4632 }
4633
4634 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004635 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004636 if (index.IsConstant()) {
4637 __ movsxw(out, Address(obj,
4638 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4639 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004640 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004641 }
4642 break;
4643 }
4644
4645 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004646 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004647 if (index.IsConstant()) {
4648 __ movzxw(out, Address(obj,
4649 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4650 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004651 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004652 }
4653 break;
4654 }
4655
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004656 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004657 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004658 if (index.IsConstant()) {
4659 __ movl(out, Address(obj,
4660 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4661 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004662 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004663 }
4664 break;
4665 }
4666
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004667 case Primitive::kPrimNot: {
4668 static_assert(
4669 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4670 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004671 // /* HeapReference<Object> */ out =
4672 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4673 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4674 Location temp = locations->GetTemp(0);
4675 // Note that a potential implicit null check is handled in this
4676        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4677 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4678 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4679 } else {
4680 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4681 if (index.IsConstant()) {
4682 uint32_t offset =
4683 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4684 __ movl(out, Address(obj, offset));
4685 codegen_->MaybeRecordImplicitNullCheck(instruction);
4686 // If read barriers are enabled, emit read barriers other than
4687 // Baker's using a slow path (and also unpoison the loaded
4688 // reference, if heap poisoning is enabled).
4689 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4690 } else {
4691 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4692 codegen_->MaybeRecordImplicitNullCheck(instruction);
4693 // If read barriers are enabled, emit read barriers other than
4694 // Baker's using a slow path (and also unpoison the loaded
4695 // reference, if heap poisoning is enabled).
4696 codegen_->MaybeGenerateReadBarrierSlow(
4697 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4698 }
4699 }
4700 break;
4701 }
4702
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004703 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004704 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004705 if (index.IsConstant()) {
4706 __ movq(out, Address(obj,
4707 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4708 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004709 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004710 }
4711 break;
4712 }
4713
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004714 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004715 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004716 if (index.IsConstant()) {
4717 __ movss(out, Address(obj,
4718 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4719 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004720 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004721 }
4722 break;
4723 }
4724
4725 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004726 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004727 if (index.IsConstant()) {
4728 __ movsd(out, Address(obj,
4729 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4730 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004731 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004732 }
4733 break;
4734 }
4735
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004736 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004737 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004738 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004739 }
Roland Levillain4d027112015-07-01 15:41:14 +01004740
4741 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004742 // Potential implicit null checks, in the case of reference
4743 // arrays, are handled in the previous switch statement.
4744 } else {
4745 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004746 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004747}
4748
4749void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004750 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004751
4752 bool needs_write_barrier =
4753 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004754 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004755 bool object_array_set_with_read_barrier =
4756 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004757
Nicolas Geoffray39468442014-09-02 15:17:15 +01004758 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004759 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004760 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004761 LocationSummary::kCallOnSlowPath :
4762 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004763
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004764 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004765 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4766 if (Primitive::IsFloatingPointType(value_type)) {
4767 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004768 } else {
4769 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4770 }
4771
4772 if (needs_write_barrier) {
4773 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004774
4775 // This first temporary register is possibly used for heap
4776 // reference poisoning and/or read barrier emission too.
4777 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004778 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004779 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004780}
4781
4782void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4783 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004784 Location array_loc = locations->InAt(0);
4785 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004786 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004787 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004788 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004789 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004790 bool needs_write_barrier =
4791 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004792 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4793 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4794 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004795
4796 switch (value_type) {
4797 case Primitive::kPrimBoolean:
4798 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004799 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4800 Address address = index.IsConstant()
4801 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4802 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4803 if (value.IsRegister()) {
4804 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004805 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004806 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004807 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004808 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004809 break;
4810 }
4811
4812 case Primitive::kPrimShort:
4813 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004814 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4815 Address address = index.IsConstant()
4816 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4817 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4818 if (value.IsRegister()) {
4819 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004820 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004821 DCHECK(value.IsConstant()) << value;
4822 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004823 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004824 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004825 break;
4826 }
4827
4828 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004829 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4830 Address address = index.IsConstant()
4831 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4832 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004833
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004834 if (!value.IsRegister()) {
4835 // Just setting null.
4836 DCHECK(instruction->InputAt(2)->IsNullConstant());
4837 DCHECK(value.IsConstant()) << value;
4838 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004839 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004840 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004841 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004842 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004843 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004844
4845 DCHECK(needs_write_barrier);
4846 CpuRegister register_value = value.AsRegister<CpuRegister>();
4847 NearLabel done, not_null, do_put;
4848 SlowPathCode* slow_path = nullptr;
4849 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004850 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004851 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4852 codegen_->AddSlowPath(slow_path);
4853 if (instruction->GetValueCanBeNull()) {
4854 __ testl(register_value, register_value);
4855 __ j(kNotEqual, &not_null);
4856 __ movl(address, Immediate(0));
4857 codegen_->MaybeRecordImplicitNullCheck(instruction);
4858 __ jmp(&done);
4859 __ Bind(&not_null);
4860 }
4861
Roland Levillain0d5a2812015-11-13 10:07:31 +00004862 if (kEmitCompilerReadBarrier) {
4863 // When read barriers are enabled, the type checking
4864 // instrumentation requires two read barriers:
4865 //
4866 // __ movl(temp2, temp);
4867 // // /* HeapReference<Class> */ temp = temp->component_type_
4868 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004869 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004870 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4871 //
4872 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4873 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004874 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004875 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4876 //
4877 // __ cmpl(temp, temp2);
4878 //
4879 // However, the second read barrier may trash `temp`, as it
4880 // is a temporary register, and as such would not be saved
4881 // along with live registers before calling the runtime (nor
4882 // restored afterwards). So in this case, we bail out and
4883 // delegate the work to the array set slow path.
4884 //
4885 // TODO: Extend the register allocator to support a new
4886 // "(locally) live temp" location so as to avoid always
4887 // going into the slow path when read barriers are enabled.
4888 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004889 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004890 // /* HeapReference<Class> */ temp = array->klass_
4891 __ movl(temp, Address(array, class_offset));
4892 codegen_->MaybeRecordImplicitNullCheck(instruction);
4893 __ MaybeUnpoisonHeapReference(temp);
4894
4895 // /* HeapReference<Class> */ temp = temp->component_type_
4896 __ movl(temp, Address(temp, component_offset));
4897 // If heap poisoning is enabled, no need to unpoison `temp`
4898 // nor the object reference in `register_value->klass`, as
4899 // we are comparing two poisoned references.
4900 __ cmpl(temp, Address(register_value, class_offset));
4901
4902 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4903 __ j(kEqual, &do_put);
4904 // If heap poisoning is enabled, the `temp` reference has
4905 // not been unpoisoned yet; unpoison it now.
4906 __ MaybeUnpoisonHeapReference(temp);
4907
4908 // /* HeapReference<Class> */ temp = temp->super_class_
4909 __ movl(temp, Address(temp, super_offset));
4910 // If heap poisoning is enabled, no need to unpoison
4911 // `temp`, as we are comparing against null below.
4912 __ testl(temp, temp);
4913 __ j(kNotEqual, slow_path->GetEntryLabel());
4914 __ Bind(&do_put);
4915 } else {
4916 __ j(kNotEqual, slow_path->GetEntryLabel());
4917 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004918 }
4919 }
4920
4921 if (kPoisonHeapReferences) {
4922 __ movl(temp, register_value);
4923 __ PoisonHeapReference(temp);
4924 __ movl(address, temp);
4925 } else {
4926 __ movl(address, register_value);
4927 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004928 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004929 codegen_->MaybeRecordImplicitNullCheck(instruction);
4930 }
4931
4932 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4933 codegen_->MarkGCCard(
4934 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4935 __ Bind(&done);
4936
4937 if (slow_path != nullptr) {
4938 __ Bind(slow_path->GetExitLabel());
4939 }
4940
4941 break;
4942 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004943
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004944 case Primitive::kPrimInt: {
4945 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4946 Address address = index.IsConstant()
4947 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4948 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4949 if (value.IsRegister()) {
4950 __ movl(address, value.AsRegister<CpuRegister>());
4951 } else {
4952 DCHECK(value.IsConstant()) << value;
4953 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4954 __ movl(address, Immediate(v));
4955 }
4956 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004957 break;
4958 }
4959
4960 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004961 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4962 Address address = index.IsConstant()
4963 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4964 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4965 if (value.IsRegister()) {
4966 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004967 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004968 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004969 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004970 Address address_high = index.IsConstant()
4971 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4972 offset + sizeof(int32_t))
4973 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4974 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004975 }
4976 break;
4977 }
4978
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004979 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004980 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4981 Address address = index.IsConstant()
4982 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4983 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004984 if (value.IsFpuRegister()) {
4985 __ movss(address, value.AsFpuRegister<XmmRegister>());
4986 } else {
4987 DCHECK(value.IsConstant());
4988 int32_t v =
4989 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4990 __ movl(address, Immediate(v));
4991 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004992 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004993 break;
4994 }
4995
4996 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004997 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4998 Address address = index.IsConstant()
4999 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
5000 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04005001 if (value.IsFpuRegister()) {
5002 __ movsd(address, value.AsFpuRegister<XmmRegister>());
5003 codegen_->MaybeRecordImplicitNullCheck(instruction);
5004 } else {
5005 int64_t v =
5006 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
5007 Address address_high = index.IsConstant()
5008 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
5009 offset + sizeof(int32_t))
5010 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
5011 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
5012 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005013 break;
5014 }
5015
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005016 case Primitive::kPrimVoid:
5017 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07005018 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005019 }
5020}
5021
5022void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005023 LocationSummary* locations =
5024 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005025 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005026 if (!instruction->IsEmittedAtUseSite()) {
5027 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5028 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005029}
5030
5031void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005032 if (instruction->IsEmittedAtUseSite()) {
5033 return;
5034 }
5035
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005036 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005037 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005038 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5039 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005040 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005041 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005042}
5043
5044void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00005045 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
5046 ? LocationSummary::kCallOnSlowPath
5047 : LocationSummary::kNoCall;
5048 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005049 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005050 HInstruction* length = instruction->InputAt(1);
5051 if (!length->IsEmittedAtUseSite()) {
5052 locations->SetInAt(1, Location::RegisterOrConstant(length));
5053 }
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005054 if (instruction->HasUses()) {
5055 locations->SetOut(Location::SameAsFirstInput());
5056 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005057}
5058
5059void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5060 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005061 Location index_loc = locations->InAt(0);
5062 Location length_loc = locations->InAt(1);
Mark Mendellee8d9712016-07-12 11:13:15 -04005063 SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005064
Mark Mendell99dbd682015-04-22 16:18:52 -04005065 if (length_loc.IsConstant()) {
5066 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5067 if (index_loc.IsConstant()) {
5068      // BCE will remove the bounds check if we are guaranteed to pass.
5069 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5070 if (index < 0 || index >= length) {
5071 codegen_->AddSlowPath(slow_path);
5072 __ jmp(slow_path->GetEntryLabel());
5073 } else {
5074        // Some optimization after BCE may have generated this check even though
5075        // the index is known to be in range, in which case there is nothing to emit.
5076 }
5077 return;
5078 }
5079
5080 // We have to reverse the jump condition because the length is the constant.
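    // Note that the unsigned kAboveEqual test below also catches negative
    // indices: viewed as unsigned, a negative index is larger than any length.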
5081 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5082 __ cmpl(index_reg, Immediate(length));
5083 codegen_->AddSlowPath(slow_path);
5084 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005085 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005086 HInstruction* array_length = instruction->InputAt(1);
5087 if (array_length->IsEmittedAtUseSite()) {
5088 // Address the length field in the array.
5089 DCHECK(array_length->IsArrayLength());
5090 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
5091 Location array_loc = array_length->GetLocations()->InAt(0);
5092 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
5093 if (index_loc.IsConstant()) {
5094 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5095 __ cmpl(array_len, Immediate(value));
5096 } else {
5097 __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
5098 }
5099 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendell99dbd682015-04-22 16:18:52 -04005100 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005101 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5102 if (index_loc.IsConstant()) {
5103 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5104 __ cmpl(length, Immediate(value));
5105 } else {
5106 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5107 }
Mark Mendell99dbd682015-04-22 16:18:52 -04005108 }
5109 codegen_->AddSlowPath(slow_path);
5110 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005111 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005112}
5113
5114void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5115 CpuRegister card,
5116 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005117 CpuRegister value,
5118 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005119 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005120 if (value_can_be_null) {
5121 __ testl(value, value);
5122 __ j(kEqual, &is_null);
5123 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005124 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5125 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005126 __ movq(temp, object);
5127 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
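  // Dirty the card at biased_card_table_base + (object >> kCardShift). The byte
  // stored is the low byte of `card`, which holds the biased card table base
  // loaded above; by construction that byte equals the dirty-card value, so no
  // separate immediate load is needed.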
Roland Levillain4d027112015-07-01 15:41:14 +01005128 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005129 if (value_can_be_null) {
5130 __ Bind(&is_null);
5131 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005132}
5133
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005134void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005135 LOG(FATAL) << "Unimplemented";
5136}
5137
5138void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005139 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5140}
5141
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005142void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5143 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5144}
5145
5146void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005147 HBasicBlock* block = instruction->GetBlock();
5148 if (block->GetLoopInformation() != nullptr) {
5149 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5150 // The back edge will generate the suspend check.
5151 return;
5152 }
5153 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5154 // The goto will generate the suspend check.
5155 return;
5156 }
5157 GenerateSuspendCheck(instruction, nullptr);
5158}
5159
5160void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5161 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005162 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005163 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5164 if (slow_path == nullptr) {
5165 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5166 instruction->SetSlowPath(slow_path);
5167 codegen_->AddSlowPath(slow_path);
5168 if (successor != nullptr) {
5169 DCHECK(successor->IsLoopHeader());
5170 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5171 }
5172 } else {
5173 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5174 }
5175
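  // The thread's flags live at a gs-relative offset (gs points at the current
  // Thread on x86-64). A non-zero value means a suspend or checkpoint request
  // is pending, so we go to the slow path, which calls the TestSuspend entry point.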
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005176 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5177 /* no_rip */ true),
5178 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005179 if (successor == nullptr) {
5180 __ j(kNotEqual, slow_path->GetEntryLabel());
5181 __ Bind(slow_path->GetReturnLabel());
5182 } else {
5183 __ j(kEqual, codegen_->GetLabelOf(successor));
5184 __ jmp(slow_path->GetEntryLabel());
5185 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005186}
5187
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005188X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5189 return codegen_->GetAssembler();
5190}
5191
5192void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005193 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005194 Location source = move->GetSource();
5195 Location destination = move->GetDestination();
5196
5197 if (source.IsRegister()) {
5198 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005199 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005200 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005201 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005202 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005203 } else {
5204 DCHECK(destination.IsDoubleStackSlot());
5205 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005206 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005207 }
5208 } else if (source.IsStackSlot()) {
5209 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005210 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005211 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005212 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005213 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005214 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005215 } else {
5216 DCHECK(destination.IsStackSlot());
5217 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5218 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5219 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005220 } else if (source.IsDoubleStackSlot()) {
5221 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005222 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005223 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005224 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005225 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5226 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005227 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005228 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005229 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5230 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5231 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005232 } else if (source.IsConstant()) {
5233 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005234 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5235 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005236 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005237 if (value == 0) {
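          // xorl reg, reg is the canonical way to zero a register: it has a
          // shorter encoding than a movl of zero and no dependency on the old value.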
5238 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5239 } else {
5240 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5241 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005242 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005243 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005244 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005245 }
5246 } else if (constant->IsLongConstant()) {
5247 int64_t value = constant->AsLongConstant()->GetValue();
5248 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005249 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005250 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005251 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005252 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005253 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005254 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005255 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005256 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005257 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005258 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005259 } else {
5260 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005261 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005262 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5263 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005264 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005265 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005266 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005267 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005268 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005269 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005270 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005271 } else {
5272 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005273 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005274 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005275 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005276 } else if (source.IsFpuRegister()) {
5277 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005278 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005279 } else if (destination.IsStackSlot()) {
5280 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005281 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005282 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005283 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005284 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005285 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005286 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005287 }
5288}
5289
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005290void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005291 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005292 __ movl(Address(CpuRegister(RSP), mem), reg);
5293 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005294}
5295
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005296void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005297 ScratchRegisterScope ensure_scratch(
5298 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5299
5300 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5301 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5302 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5303 Address(CpuRegister(RSP), mem2 + stack_offset));
5304 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5305 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5306 CpuRegister(ensure_scratch.GetRegister()));
5307}
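// Note on the stack/stack exchange above: when no core register besides TMP
// is free, ScratchRegisterScope spills one via SpillScratch() (a pushq),
// which moves RSP down by one word; `stack_offset` compensates so that
// `mem1` and `mem2` still address the same slots. Sketch of the unspilled
// case (register names illustrative):
//   movl TMP,     [RSP + mem1]
//   movl scratch, [RSP + mem2]
//   movl [RSP + mem2], TMP
//   movl [RSP + mem1], scratch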
5308
Mark Mendell8a1c7282015-06-29 15:41:28 -04005309void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5310 __ movq(CpuRegister(TMP), reg1);
5311 __ movq(reg1, reg2);
5312 __ movq(reg2, CpuRegister(TMP));
5313}
5314
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005315void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5316 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5317 __ movq(Address(CpuRegister(RSP), mem), reg);
5318 __ movq(reg, CpuRegister(TMP));
5319}
5320
5321void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5322 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005323 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005324
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005325 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5326 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5327 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5328 Address(CpuRegister(RSP), mem2 + stack_offset));
5329 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5330 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5331 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005332}
5333
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005334void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5335 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5336 __ movss(Address(CpuRegister(RSP), mem), reg);
5337 __ movd(reg, CpuRegister(TMP));
5338}
5339
5340void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5341 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5342 __ movsd(Address(CpuRegister(RSP), mem), reg);
5343 __ movd(reg, CpuRegister(TMP));
5344}
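// Both XMM/stack exchanges above route the stack value through the core TMP
// register, using movd to move it between TMP and the XMM register, so no
// scratch XMM register is needed; the memory slot itself is rewritten with
// movss (32-bit) or movsd (64-bit).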
5345
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005346void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005347 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005348 Location source = move->GetSource();
5349 Location destination = move->GetDestination();
5350
5351 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005352 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005353 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005354 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005355 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005356 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005357 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005358 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5359 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005360 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005361 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005362 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005363 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5364 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005365 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005366 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5367 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5368 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005369 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005370 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005371 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005372 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005373 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005374 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005375 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005376 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005377 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005378 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005379 }
5380}
5381
5382
5383void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5384 __ pushq(CpuRegister(reg));
5385}
5386
5387
5388void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5389 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005390}
5391
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005392void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005393 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005394 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5395 Immediate(mirror::Class::kStatusInitialized));
5396 __ j(kLess, slow_path->GetEntryLabel());
5397 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005398 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005399}
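// The check above compiles down to a compare and a conditional jump
// (sketch):
//   cmpl [class_reg + mirror::Class::StatusOffset()], kStatusInitialized
//   jl   <slow path entry>
// Any status below kStatusInitialized takes the slow path provided by the
// caller (a LoadClassSlowPathX86_64 requesting a clinit check, see below).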
5400
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005401HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5402 HLoadClass::LoadKind desired_class_load_kind) {
5403 if (kEmitCompilerReadBarrier) {
5404 switch (desired_class_load_kind) {
5405 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5406 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5407 case HLoadClass::LoadKind::kBootImageAddress:
5408 // TODO: Implement for read barrier.
5409 return HLoadClass::LoadKind::kDexCacheViaMethod;
5410 default:
5411 break;
5412 }
5413 }
5414 switch (desired_class_load_kind) {
5415 case HLoadClass::LoadKind::kReferrersClass:
5416 break;
5417 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5418 DCHECK(!GetCompilerOptions().GetCompilePic());
5419 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5420 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5421 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5422 DCHECK(GetCompilerOptions().GetCompilePic());
5423 break;
5424 case HLoadClass::LoadKind::kBootImageAddress:
5425 break;
5426 case HLoadClass::LoadKind::kDexCacheAddress:
5427 DCHECK(Runtime::Current()->UseJitCompilation());
5428 break;
5429 case HLoadClass::LoadKind::kDexCachePcRelative:
5430 DCHECK(!Runtime::Current()->UseJitCompilation());
5431 break;
5432 case HLoadClass::LoadKind::kDexCacheViaMethod:
5433 break;
5434 }
5435 return desired_class_load_kind;
5436}
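// Summary of the selection above: with read barriers the boot-image kinds
// fall back to kDexCacheViaMethod; without PIC, kBootImageLinkTimeAddress is
// upgraded to the RIP-relative kBootImageLinkTimePcRelative; and the two
// dex-cache kinds are only expected under JIT (kDexCacheAddress) or AOT
// (kDexCachePcRelative) compilation, as the DCHECKs assert.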
5437
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005438void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005439 if (cls->NeedsAccessCheck()) {
5440 InvokeRuntimeCallingConvention calling_convention;
5441 CodeGenerator::CreateLoadClassLocationSummary(
5442 cls,
5443 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5444 Location::RegisterLocation(RAX),
5445 /* code_generator_supports_read_barrier */ true);
5446 return;
5447 }
5448
5449 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
5450 ? LocationSummary::kCallOnSlowPath
5451 : LocationSummary::kNoCall;
5452 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
5453 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5454 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5455 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5456 locations->SetInAt(0, Location::RequiresRegister());
5457 }
5458 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005459}
5460
5461void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005462 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005463 if (cls->NeedsAccessCheck()) {
5464 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5465 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5466 cls,
5467 cls->GetDexPc(),
5468 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005469 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005470 return;
5471 }
5472
Roland Levillain0d5a2812015-11-13 10:07:31 +00005473 Location out_loc = locations->Out();
5474 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005475
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005476 bool generate_null_check = false;
5477 switch (cls->GetLoadKind()) {
5478 case HLoadClass::LoadKind::kReferrersClass: {
5479 DCHECK(!cls->CanCallRuntime());
5480 DCHECK(!cls->MustGenerateClinitCheck());
5481 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5482 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5483 GenerateGcRootFieldLoad(
5484 cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5485 break;
5486 }
5487 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5488 DCHECK(!kEmitCompilerReadBarrier);
5489 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5490 codegen_->RecordTypePatch(cls);
5491 break;
5492 case HLoadClass::LoadKind::kBootImageAddress: {
5493 DCHECK(!kEmitCompilerReadBarrier);
5494 DCHECK_NE(cls->GetAddress(), 0u);
5495 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
5496 __ movl(out, Immediate(address)); // Zero-extended.
5497 codegen_->RecordSimplePatch();
5498 break;
5499 }
5500 case HLoadClass::LoadKind::kDexCacheAddress: {
5501 DCHECK_NE(cls->GetAddress(), 0u);
5502 // /* GcRoot<mirror::Class> */ out = *address
5503 if (IsUint<32>(cls->GetAddress())) {
5504 Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
5505 GenerateGcRootFieldLoad(cls, out_loc, address);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005506 } else {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005507 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5508 __ movq(out, Immediate(cls->GetAddress()));
5509 GenerateGcRootFieldLoad(cls, out_loc, Address(out, 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005510 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005511 generate_null_check = !cls->IsInDexCache();
5512 break;
5513 }
5514 case HLoadClass::LoadKind::kDexCachePcRelative: {
5515 uint32_t offset = cls->GetDexCacheElementOffset();
5516 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
5517 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5518 /* no_rip */ false);
5519 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
5520 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label);
5521 generate_null_check = !cls->IsInDexCache();
5522 break;
5523 }
5524 case HLoadClass::LoadKind::kDexCacheViaMethod: {
5525 // /* GcRoot<mirror::Class>[] */ out =
5526 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5527 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5528 __ movq(out,
5529 Address(current_method,
5530 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
5531 // /* GcRoot<mirror::Class> */ out = out[type_index]
5532 GenerateGcRootFieldLoad(
5533 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
5534 generate_null_check = !cls->IsInDexCache();
5535 break;
5536 }
5537 default:
5538 LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
5539 UNREACHABLE();
5540 }
5541
5542 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5543 DCHECK(cls->CanCallRuntime());
5544 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5545 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5546 codegen_->AddSlowPath(slow_path);
5547 if (generate_null_check) {
5548 __ testl(out, out);
5549 __ j(kEqual, slow_path->GetEntryLabel());
5550 }
5551 if (cls->MustGenerateClinitCheck()) {
5552 GenerateClassInitializationCheck(slow_path, out);
5553 } else {
5554 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005555 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005556 }
5557}
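// Note that the epilogue above shares a single LoadClassSlowPathX86_64
// between the two checks: the testl/j(kEqual) pair enters it when the type
// is not yet in the dex cache (generate_null_check), and
// GenerateClassInitializationCheck enters it when a clinit check is needed;
// when only the null check applies, just the exit label is bound here.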
5558
5559void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5560 LocationSummary* locations =
5561 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5562 locations->SetInAt(0, Location::RequiresRegister());
5563 if (check->HasUses()) {
5564 locations->SetOut(Location::SameAsFirstInput());
5565 }
5566}
5567
5568void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005569 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005570 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005571 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005572 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005573 GenerateClassInitializationCheck(slow_path,
5574 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005575}
5576
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005577HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5578 HLoadString::LoadKind desired_string_load_kind) {
5579 if (kEmitCompilerReadBarrier) {
5580 switch (desired_string_load_kind) {
5581 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5582 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5583 case HLoadString::LoadKind::kBootImageAddress:
5584 // TODO: Implement for read barrier.
5585 return HLoadString::LoadKind::kDexCacheViaMethod;
5586 default:
5587 break;
5588 }
5589 }
5590 switch (desired_string_load_kind) {
5591 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5592 DCHECK(!GetCompilerOptions().GetCompilePic());
5593 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5594 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5595 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5596 DCHECK(GetCompilerOptions().GetCompilePic());
5597 break;
5598 case HLoadString::LoadKind::kBootImageAddress:
5599 break;
5600 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005601 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005602 break;
5603 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005604 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005605 break;
5606 case HLoadString::LoadKind::kDexCacheViaMethod:
5607 break;
5608 }
5609 return desired_string_load_kind;
5610}
5611
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005612void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005613 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005614 ? LocationSummary::kCallOnSlowPath
5615 : LocationSummary::kNoCall;
5616 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005617 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5618 locations->SetInAt(0, Location::RequiresRegister());
5619 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005620 locations->SetOut(Location::RequiresRegister());
5621}
5622
5623void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005624 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005625 Location out_loc = locations->Out();
5626 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005627
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005628 switch (load->GetLoadKind()) {
5629 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
5630 DCHECK(!kEmitCompilerReadBarrier);
5631 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5632 codegen_->RecordStringPatch(load);
5633 return; // No dex cache slow path.
5634 }
5635 case HLoadString::LoadKind::kBootImageAddress: {
5636 DCHECK(!kEmitCompilerReadBarrier);
5637 DCHECK_NE(load->GetAddress(), 0u);
5638 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5639 __ movl(out, Immediate(address)); // Zero-extended.
5640 codegen_->RecordSimplePatch();
5641 return; // No dex cache slow path.
5642 }
5643 case HLoadString::LoadKind::kDexCacheAddress: {
5644 DCHECK_NE(load->GetAddress(), 0u);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005645 // /* GcRoot<mirror::String> */ out = *address
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005646 if (IsUint<32>(load->GetAddress())) {
5647 Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
5648 GenerateGcRootFieldLoad(load, out_loc, address);
5649 } else {
5650 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5651 __ movq(out, Immediate(load->GetAddress()));
5652 GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
5653 }
5654 break;
5655 }
5656 case HLoadString::LoadKind::kDexCachePcRelative: {
5657 uint32_t offset = load->GetDexCacheElementOffset();
5658 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
5659 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5660 /* no_rip */ false);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005661 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005662 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
5663 break;
5664 }
5665 case HLoadString::LoadKind::kDexCacheViaMethod: {
5666 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5667
5668 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5669 GenerateGcRootFieldLoad(
5670 load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5671 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5672 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
5673 // /* GcRoot<mirror::String> */ out = out[string_index]
5674 GenerateGcRootFieldLoad(
5675 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
5676 break;
5677 }
5678 default:
5679 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
5680 UNREACHABLE();
5681 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005682
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005683 if (!load->IsInDexCache()) {
5684 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5685 codegen_->AddSlowPath(slow_path);
5686 __ testl(out, out);
5687 __ j(kEqual, slow_path->GetEntryLabel());
5688 __ Bind(slow_path->GetExitLabel());
5689 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005690}
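// Note the control flow above: the two boot-image kinds return early ("No
// dex cache slow path"), while the dex-cache kinds fall through to the
// IsInDexCache() test and, when needed, a LoadStringSlowPathX86_64 that
// resolves the string at runtime.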
5691
David Brazdilcb1c0552015-08-04 16:22:25 +01005692static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005693 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5694 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005695}
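// The absolute, non-RIP address above is always used behind the gs() prefix
// (see the loads below): on x86-64, ART addresses the current Thread through
// the GS segment, so gs:[ExceptionOffset] reads or clears the pending
// exception without materializing the thread pointer in a register.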
5696
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005697void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5698 LocationSummary* locations =
5699 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5700 locations->SetOut(Location::RequiresRegister());
5701}
5702
5703void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005704 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5705}
5706
5707void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5708 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5709}
5710
5711void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5712 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005713}
5714
5715void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5716 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005717 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005718 InvokeRuntimeCallingConvention calling_convention;
5719 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5720}
5721
5722void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005723 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5724 instruction,
5725 instruction->GetDexPc(),
5726 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005727 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005728}
5729
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005730static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5731 return kEmitCompilerReadBarrier &&
5732 (kUseBakerReadBarrier ||
5733 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5734 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5735 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5736}
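// Rationale for the helper above (see GenerateReferenceLoadOneRegister
// further down): with Baker read barriers every kind requests a temp for
// GenerateFieldLoadWithBakerReadBarrier, and with slow-path read barriers
// the kinds that walk super or component classes need a temp to save the
// reference about to be overwritten, for use by the read barrier.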
5737
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005738void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005739 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005740 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5741 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005742 case TypeCheckKind::kExactCheck:
5743 case TypeCheckKind::kAbstractClassCheck:
5744 case TypeCheckKind::kClassHierarchyCheck:
5745 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005746 call_kind =
5747 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005748 break;
5749 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005750 case TypeCheckKind::kUnresolvedCheck:
5751 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005752 call_kind = LocationSummary::kCallOnSlowPath;
5753 break;
5754 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005755
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005756 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005757 locations->SetInAt(0, Location::RequiresRegister());
5758 locations->SetInAt(1, Location::Any());
5759 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5760 locations->SetOut(Location::RequiresRegister());
5761 // When read barriers are enabled, we need a temporary register for
5762 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005763 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005764 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005765 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005766}
5767
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005768void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005769 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005770 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005771 Location obj_loc = locations->InAt(0);
5772 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005773 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005774 Location out_loc = locations->Out();
5775 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005776 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005777 locations->GetTemp(0) :
5778 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005779 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005780 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5781 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5782 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005783 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005784 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005785
5786 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005787 // Avoid null check if we know obj is not null.
5788 if (instruction->MustDoNullCheck()) {
5789 __ testl(obj, obj);
5790 __ j(kEqual, &zero);
5791 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005792
Roland Levillain0d5a2812015-11-13 10:07:31 +00005793 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005794 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005795
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005796 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005797 case TypeCheckKind::kExactCheck: {
5798 if (cls.IsRegister()) {
5799 __ cmpl(out, cls.AsRegister<CpuRegister>());
5800 } else {
5801 DCHECK(cls.IsStackSlot()) << cls;
5802 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5803 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005804 if (zero.IsLinked()) {
5805 // Classes must be equal for the instanceof to succeed.
5806 __ j(kNotEqual, &zero);
5807 __ movl(out, Immediate(1));
5808 __ jmp(&done);
5809 } else {
5810 __ setcc(kEqual, out);
5811 // setcc only sets the low byte.
5812 __ andl(out, Immediate(1));
5813 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005814 break;
5815 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005816
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005817 case TypeCheckKind::kAbstractClassCheck: {
5818 // If the class is abstract, we eagerly fetch the super class of the
5819 // object to avoid doing a comparison we know will fail.
5820 NearLabel loop, success;
5821 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005822 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005823 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005824 __ testl(out, out);
5825 // If `out` is null, we use it for the result, and jump to `done`.
5826 __ j(kEqual, &done);
5827 if (cls.IsRegister()) {
5828 __ cmpl(out, cls.AsRegister<CpuRegister>());
5829 } else {
5830 DCHECK(cls.IsStackSlot()) << cls;
5831 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5832 }
5833 __ j(kNotEqual, &loop);
5834 __ movl(out, Immediate(1));
5835 if (zero.IsLinked()) {
5836 __ jmp(&done);
5837 }
5838 break;
5839 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005840
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005841 case TypeCheckKind::kClassHierarchyCheck: {
5842 // Walk over the class hierarchy to find a match.
5843 NearLabel loop, success;
5844 __ Bind(&loop);
5845 if (cls.IsRegister()) {
5846 __ cmpl(out, cls.AsRegister<CpuRegister>());
5847 } else {
5848 DCHECK(cls.IsStackSlot()) << cls;
5849 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5850 }
5851 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005852 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005853 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005854 __ testl(out, out);
5855 __ j(kNotEqual, &loop);
5856 // If `out` is null, we use it for the result, and jump to `done`.
5857 __ jmp(&done);
5858 __ Bind(&success);
5859 __ movl(out, Immediate(1));
5860 if (zero.IsLinked()) {
5861 __ jmp(&done);
5862 }
5863 break;
5864 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005865
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005866 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005867 // Do an exact check.
5868 NearLabel exact_check;
5869 if (cls.IsRegister()) {
5870 __ cmpl(out, cls.AsRegister<CpuRegister>());
5871 } else {
5872 DCHECK(cls.IsStackSlot()) << cls;
5873 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5874 }
5875 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005876 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005877 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005878 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005879 __ testl(out, out);
5880 // If `out` is null, we use it for the result, and jump to `done`.
5881 __ j(kEqual, &done);
5882 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5883 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005884 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005885 __ movl(out, Immediate(1));
5886 __ jmp(&done);
5887 break;
5888 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005889
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005890 case TypeCheckKind::kArrayCheck: {
5891 if (cls.IsRegister()) {
5892 __ cmpl(out, cls.AsRegister<CpuRegister>());
5893 } else {
5894 DCHECK(cls.IsStackSlot()) << cls;
5895 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5896 }
5897 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005898 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5899 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005900 codegen_->AddSlowPath(slow_path);
5901 __ j(kNotEqual, slow_path->GetEntryLabel());
5902 __ movl(out, Immediate(1));
5903 if (zero.IsLinked()) {
5904 __ jmp(&done);
5905 }
5906 break;
5907 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005908
Calin Juravle98893e12015-10-02 21:05:03 +01005909 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005910 case TypeCheckKind::kInterfaceCheck: {
5911 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005912 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005913 // cases.
5914 //
5915 // We cannot directly call the InstanceofNonTrivial runtime
5916 // entry point without resorting to a type checking slow path
5917 // here (i.e. by calling InvokeRuntime directly), as it would
5918 // require to assign fixed registers for the inputs of this
5919 // HInstanceOf instruction (following the runtime calling
5920 // convention), which might be cluttered by the potential first
5921 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005922 //
5923 // TODO: Introduce a new runtime entry point taking the object
5924 // to test (instead of its class) as argument, and let it deal
5925 // with the read barrier issues. This will let us refactor this
5926 // case of the `switch` code as it was previously (with a direct
5927 // call to the runtime not using a type checking slow path).
5928 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005929 DCHECK(locations->OnlyCallsOnSlowPath());
5930 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5931 /* is_fatal */ false);
5932 codegen_->AddSlowPath(slow_path);
5933 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005934 if (zero.IsLinked()) {
5935 __ jmp(&done);
5936 }
5937 break;
5938 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005939 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005940
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005941 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005942 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005943 __ xorl(out, out);
5944 }
5945
5946 if (done.IsLinked()) {
5947 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005948 }
5949
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005950 if (slow_path != nullptr) {
5951 __ Bind(slow_path->GetExitLabel());
5952 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005953}
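// Register protocol for the instanceof sequence above: `out` first receives
// the object's class, is then walked in place by the hierarchy/component
// checks, and finally holds the boolean result (0 through the `zero` label,
// 1 on the success paths); `cls` may live in a register or on the stack,
// which is why every comparison has both cmpl forms.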
5954
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005955void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005956 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5957 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005958 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5959 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005960 case TypeCheckKind::kExactCheck:
5961 case TypeCheckKind::kAbstractClassCheck:
5962 case TypeCheckKind::kClassHierarchyCheck:
5963 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005964 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5965 LocationSummary::kCallOnSlowPath :
5966 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005967 break;
5968 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005969 case TypeCheckKind::kUnresolvedCheck:
5970 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005971 call_kind = LocationSummary::kCallOnSlowPath;
5972 break;
5973 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005974 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5975 locations->SetInAt(0, Location::RequiresRegister());
5976 locations->SetInAt(1, Location::Any());
5977 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5978 locations->AddTemp(Location::RequiresRegister());
5979 // When read barriers are enabled, we need an additional temporary
5980 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005981 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005982 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005983 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005984}
5985
5986void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005987 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005988 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005989 Location obj_loc = locations->InAt(0);
5990 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005991 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005992 Location temp_loc = locations->GetTemp(0);
5993 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005994 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005995 locations->GetTemp(1) :
5996 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005997 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5998 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5999 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6000 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006001
Roland Levillain0d5a2812015-11-13 10:07:31 +00006002 bool is_type_check_slow_path_fatal =
6003 (type_check_kind == TypeCheckKind::kExactCheck ||
6004 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
6005 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
6006 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
6007 !instruction->CanThrowIntoCatchBlock();
6008 SlowPathCode* type_check_slow_path =
6009 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
6010 is_type_check_slow_path_fatal);
6011 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006012
Roland Levillain0d5a2812015-11-13 10:07:31 +00006013 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006014 case TypeCheckKind::kExactCheck:
6015 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006016 NearLabel done;
6017 // Avoid null check if we know obj is not null.
6018 if (instruction->MustDoNullCheck()) {
6019 __ testl(obj, obj);
6020 __ j(kEqual, &done);
6021 }
6022
6023 // /* HeapReference<Class> */ temp = obj->klass_
6024 GenerateReferenceLoadTwoRegisters(
6025 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6026
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006027 if (cls.IsRegister()) {
6028 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6029 } else {
6030 DCHECK(cls.IsStackSlot()) << cls;
6031 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6032 }
6033 // Jump to slow path for throwing the exception or doing a
6034 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006035 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006036 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006037 break;
6038 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006039
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006040 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006041 NearLabel done;
6042 // Avoid null check if we know obj is not null.
6043 if (instruction->MustDoNullCheck()) {
6044 __ testl(obj, obj);
6045 __ j(kEqual, &done);
6046 }
6047
6048 // /* HeapReference<Class> */ temp = obj->klass_
6049 GenerateReferenceLoadTwoRegisters(
6050 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6051
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006052 // If the class is abstract, we eagerly fetch the super class of the
6053 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006054 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006055 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006056 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006057 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006058
6059 // If the class reference currently in `temp` is not null, jump
6060 // to the `compare_classes` label to compare it with the checked
6061 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006062 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006063 __ j(kNotEqual, &compare_classes);
6064 // Otherwise, jump to the slow path to throw the exception.
6065 //
6066 // But before, move back the object's class into `temp` before
6067 // going into the slow path, as it has been overwritten in the
6068 // meantime.
6069 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006070 GenerateReferenceLoadTwoRegisters(
6071 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006072 __ jmp(type_check_slow_path->GetEntryLabel());
6073
6074 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006075 if (cls.IsRegister()) {
6076 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6077 } else {
6078 DCHECK(cls.IsStackSlot()) << cls;
6079 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6080 }
6081 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00006082 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006083 break;
6084 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006085
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006086 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006087 NearLabel done;
6088 // Avoid null check if we know obj is not null.
6089 if (instruction->MustDoNullCheck()) {
6090 __ testl(obj, obj);
6091 __ j(kEqual, &done);
6092 }
6093
6094 // /* HeapReference<Class> */ temp = obj->klass_
6095 GenerateReferenceLoadTwoRegisters(
6096 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6097
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006098 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006099 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006100 __ Bind(&loop);
6101 if (cls.IsRegister()) {
6102 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6103 } else {
6104 DCHECK(cls.IsStackSlot()) << cls;
6105 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6106 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006107 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006108
Roland Levillain0d5a2812015-11-13 10:07:31 +00006109 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006110 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006111
6112 // If the class reference currently in `temp` is not null, jump
 6113 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006114 __ testl(temp, temp);
6115 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006116 // Otherwise, jump to the slow path to throw the exception.
6117 //
6118 // But before, move back the object's class into `temp` before
6119 // going into the slow path, as it has been overwritten in the
6120 // meantime.
6121 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006122 GenerateReferenceLoadTwoRegisters(
6123 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006124 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006125 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006126 break;
6127 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006128
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006129 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006130 // We cannot use a NearLabel here, as its range might be too
6131 // short in some cases when read barriers are enabled. This has
6132 // been observed for instance when the code emitted for this
6133 // case uses high x86-64 registers (R8-R15).
6134 Label done;
6135 // Avoid null check if we know obj is not null.
6136 if (instruction->MustDoNullCheck()) {
6137 __ testl(obj, obj);
6138 __ j(kEqual, &done);
6139 }
6140
6141 // /* HeapReference<Class> */ temp = obj->klass_
6142 GenerateReferenceLoadTwoRegisters(
6143 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6144
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006145 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006146 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006147 if (cls.IsRegister()) {
6148 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6149 } else {
6150 DCHECK(cls.IsStackSlot()) << cls;
6151 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6152 }
6153 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006154
6155 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006156 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006157 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006158
6159 // If the component type is not null (i.e. the object is indeed
6160 // an array), jump to label `check_non_primitive_component_type`
6161 // to further check that this component type is not a primitive
6162 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006163 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006164 __ j(kNotEqual, &check_non_primitive_component_type);
6165 // Otherwise, jump to the slow path to throw the exception.
6166 //
6167 // But before, move back the object's class into `temp` before
6168 // going into the slow path, as it has been overwritten in the
6169 // meantime.
6170 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006171 GenerateReferenceLoadTwoRegisters(
6172 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006173 __ jmp(type_check_slow_path->GetEntryLabel());
6174
6175 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006176 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00006177 __ j(kEqual, &done);
6178 // Same comment as above regarding `temp` and the slow path.
6179 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006180 GenerateReferenceLoadTwoRegisters(
6181 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006182 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006183 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006184 break;
6185 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006186
Calin Juravle98893e12015-10-02 21:05:03 +01006187 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006188 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00006189 NearLabel done;
6190 // Avoid null check if we know obj is not null.
6191 if (instruction->MustDoNullCheck()) {
6192 __ testl(obj, obj);
6193 __ j(kEqual, &done);
6194 }
6195
6196 // /* HeapReference<Class> */ temp = obj->klass_
6197 GenerateReferenceLoadTwoRegisters(
6198 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6199
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006200 // We always go into the type check slow path for the unresolved
6201 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006202 //
6203 // We cannot directly call the CheckCast runtime entry point
6204 // without resorting to a type checking slow path here (i.e. by
6205 // calling InvokeRuntime directly), as it would require to
6206 // assign fixed registers for the inputs of this HInstanceOf
6207 // instruction (following the runtime calling convention), which
6208 // might be cluttered by the potential first read barrier
6209 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006210 //
6211 // TODO: Introduce a new runtime entry point taking the object
6212 // to test (instead of its class) as argument, and let it deal
6213 // with the read barrier issues. This will let us refactor this
6214 // case of the `switch` code as it was previously (with a direct
6215 // call to the runtime not using a type checking slow path).
6216 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006217 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006218 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006219 break;
6220 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006221
Roland Levillain0d5a2812015-11-13 10:07:31 +00006222 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006223}
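// Note the pattern repeated in the cases above: `temp` receives the object's
// class and is walked in place; before any jump to the type check slow path
// that follows such a walk, obj->klass_ is reloaded into `temp`, because the
// walk has overwritten the value the slow path expects there.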
6224
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006225void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6226 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006227 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006228 InvokeRuntimeCallingConvention calling_convention;
6229 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6230}
6231
6232void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01006233 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
6234 : QUICK_ENTRY_POINT(pUnlockObject),
6235 instruction,
6236 instruction->GetDexPc(),
6237 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006238 if (instruction->IsEnter()) {
6239 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6240 } else {
6241 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6242 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006243}
6244
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006245void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6246void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6247void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6248
6249void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6250 LocationSummary* locations =
6251 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6252 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6253 || instruction->GetResultType() == Primitive::kPrimLong);
6254 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006255 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006256 locations->SetOut(Location::SameAsFirstInput());
6257}
6258
6259void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6260 HandleBitwiseOperation(instruction);
6261}
6262
6263void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6264 HandleBitwiseOperation(instruction);
6265}
6266
6267void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6268 HandleBitwiseOperation(instruction);
6269}
6270
6271void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6272 LocationSummary* locations = instruction->GetLocations();
6273 Location first = locations->InAt(0);
6274 Location second = locations->InAt(1);
6275 DCHECK(first.Equals(locations->Out()));
6276
6277 if (instruction->GetResultType() == Primitive::kPrimInt) {
6278 if (second.IsRegister()) {
6279 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006280 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006281 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006282 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006283 } else {
6284 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006285 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006286 }
6287 } else if (second.IsConstant()) {
6288 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6289 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006290 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006291 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006292 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006293 } else {
6294 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006295 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006296 }
6297 } else {
6298 Address address(CpuRegister(RSP), second.GetStackIndex());
6299 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006300 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006301 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006302 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006303 } else {
6304 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006305 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006306 }
6307 }
6308 } else {
6309 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006310 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6311 bool second_is_constant = false;
6312 int64_t value = 0;
6313 if (second.IsConstant()) {
6314 second_is_constant = true;
6315 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006316 }
Mark Mendell40741f32015-04-20 22:10:34 -04006317 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006318
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006319 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006320 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006321 if (is_int32_value) {
6322 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6323 } else {
6324 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6325 }
6326 } else if (second.IsDoubleStackSlot()) {
6327 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006328 } else {
6329 __ andq(first_reg, second.AsRegister<CpuRegister>());
6330 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006331 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006332 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006333 if (is_int32_value) {
6334 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6335 } else {
6336 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6337 }
6338 } else if (second.IsDoubleStackSlot()) {
6339 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006340 } else {
6341 __ orq(first_reg, second.AsRegister<CpuRegister>());
6342 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006343 } else {
6344 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006345 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006346 if (is_int32_value) {
6347 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6348 } else {
6349 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6350 }
6351 } else if (second.IsDoubleStackSlot()) {
6352 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006353 } else {
6354 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6355 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006356 }
6357 }
6358}
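// Illustrative sketch of the long-operand selection above; the register names and the
// literal displacement are assumptions, not what any particular compilation emits:
//   x & 0xffL          ->  andq $0xff, %rdi        // constant fits in a sign-extended int32
//   x & 0x123456789L   ->  andq 0x??(%rip), %rdi   // 64-bit literal in the constant area
//   x & spilled_value  ->  andq 0x10(%rsp), %rdi   // second input lives on the stack
//   x & y              ->  andq %rsi, %rdi         // both inputs in registers
// The int case is analogous but uses the 32-bit forms (andl/orl/xorl) and never needs
// the constant area, since every int constant fits in an immediate.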
6359
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006360void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6361 Location out,
6362 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006363 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006364 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6365 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006366 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006367 if (kUseBakerReadBarrier) {
6368 // Load with fast path based Baker's read barrier.
6369 // /* HeapReference<Object> */ out = *(out + offset)
6370 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006371 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006372 } else {
6373 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006374 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006375 // in the following move operation, as we will need it for the
6376 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006377 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006378 // /* HeapReference<Object> */ out = *(out + offset)
6379 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006380 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006381 }
6382 } else {
6383 // Plain load with no read barrier.
6384 // /* HeapReference<Object> */ out = *(out + offset)
6385 __ movl(out_reg, Address(out_reg, offset));
6386 __ MaybeUnpoisonHeapReference(out_reg);
6387 }
6388}
6389
6390void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6391 Location out,
6392 Location obj,
6393 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006394 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006395 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6396 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6397 if (kEmitCompilerReadBarrier) {
6398 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006399 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006400 // Load with fast path based Baker's read barrier.
6401 // /* HeapReference<Object> */ out = *(obj + offset)
6402 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006403 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006404 } else {
6405 // Load with slow path based read barrier.
6406 // /* HeapReference<Object> */ out = *(obj + offset)
6407 __ movl(out_reg, Address(obj_reg, offset));
6408 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6409 }
6410 } else {
6411 // Plain load with no read barrier.
6412 // /* HeapReference<Object> */ out = *(obj + offset)
6413 __ movl(out_reg, Address(obj_reg, offset));
6414 __ MaybeUnpoisonHeapReference(out_reg);
6415 }
6416}
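// The two reference-load helpers above differ only in whether the source register
// survives: the one-register form clobbers `out`, so its non-Baker path first copies
// `out` into `maybe_temp` for the slow path, while the two-register form reads from
// `obj` and can hand `obj` itself to the slow path. A sketch of the non-Baker flows
// (register names are placeholders):
//   one register:   movl temp, out
//                   movl out, [out + offset]
//                   GenerateReadBarrierSlow(instr, out, out, temp, offset)
//   two registers:  movl out, [obj + offset]
//                   GenerateReadBarrierSlow(instr, out, out, obj, offset)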
6417
6418void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6419 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006420 const Address& address,
6421 Label* fixup_label) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006422 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6423 if (kEmitCompilerReadBarrier) {
6424 if (kUseBakerReadBarrier) {
6425 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6426      // Baker's read barriers are used:
6427 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006428 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006429 // if (Thread::Current()->GetIsGcMarking()) {
6430 // root = ReadBarrier::Mark(root)
6431 // }
6432
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006433 // /* GcRoot<mirror::Object> */ root = *address
6434 __ movl(root_reg, address);
6435 if (fixup_label != nullptr) {
6436 __ Bind(fixup_label);
6437 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006438 static_assert(
6439 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6440 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6441 "have different sizes.");
6442 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6443 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6444 "have different sizes.");
6445
6446 // Slow path used to mark the GC root `root`.
6447 SlowPathCode* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01006448 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006449 codegen_->AddSlowPath(slow_path);
6450
6451 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6452 /* no_rip */ true),
6453 Immediate(0));
6454 __ j(kNotEqual, slow_path->GetEntryLabel());
6455 __ Bind(slow_path->GetExitLabel());
6456 } else {
6457 // GC root loaded through a slow path for read barriers other
6458 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006459 // /* GcRoot<mirror::Object>* */ root = address
6460 __ leaq(root_reg, address);
6461 if (fixup_label != nullptr) {
6462 __ Bind(fixup_label);
6463 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006464 // /* mirror::Object* */ root = root->Read()
6465 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6466 }
6467 } else {
6468 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006469 // /* GcRoot<mirror::Object> */ root = *address
6470 __ movl(root_reg, address);
6471 if (fixup_label != nullptr) {
6472 __ Bind(fixup_label);
6473 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006474 // Note that GC roots are not affected by heap poisoning, thus we
6475 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006476 }
6477}
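// Rough shape of the Baker fast path emitted above for a GC root load; exact operands
// and the slow-path label are assembler-assigned, so this is only a sketch:
//   movl root, [address]                        // load the compressed root
//   cmpl gs:[Thread::IsGcMarkingOffset()], $0   // is the GC concurrently marking?
//   jne  read_barrier_mark_slow_path            // if so, root = ReadBarrier::Mark(root)
//  slow_path_exit:
// The non-Baker variant instead materializes the root's address with leaq and lets
// GenerateReadBarrierForRootSlow perform the actual read, while the no-read-barrier
// case is just the movl.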
6478
6479void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6480 Location ref,
6481 CpuRegister obj,
6482 uint32_t offset,
6483 Location temp,
6484 bool needs_null_check) {
6485 DCHECK(kEmitCompilerReadBarrier);
6486 DCHECK(kUseBakerReadBarrier);
6487
6488 // /* HeapReference<Object> */ ref = *(obj + offset)
6489 Address src(obj, offset);
6490 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6491}
6492
6493void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6494 Location ref,
6495 CpuRegister obj,
6496 uint32_t data_offset,
6497 Location index,
6498 Location temp,
6499 bool needs_null_check) {
6500 DCHECK(kEmitCompilerReadBarrier);
6501 DCHECK(kUseBakerReadBarrier);
6502
Roland Levillain3d312422016-06-23 13:53:42 +01006503 static_assert(
6504 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6505 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006506 // /* HeapReference<Object> */ ref =
6507 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6508 Address src = index.IsConstant() ?
6509 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6510 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6511 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6512}
6513
6514void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6515 Location ref,
6516 CpuRegister obj,
6517 const Address& src,
6518 Location temp,
6519 bool needs_null_check) {
6520 DCHECK(kEmitCompilerReadBarrier);
6521 DCHECK(kUseBakerReadBarrier);
6522
6523 // In slow path based read barriers, the read barrier call is
6524 // inserted after the original load. However, in fast path based
6525 // Baker's read barriers, we need to perform the load of
6526 // mirror::Object::monitor_ *before* the original reference load.
6527 // This load-load ordering is required by the read barrier.
6528 // The fast path/slow path (for Baker's algorithm) should look like:
6529 //
6530 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6531 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6532 // HeapReference<Object> ref = *src; // Original reference load.
6533 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6534 // if (is_gray) {
6535 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6536 // }
6537 //
6538 // Note: the original implementation in ReadBarrier::Barrier is
6539 // slightly more complex as:
6540 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006541 // the high-bits of rb_state, which are expected to be all zeroes
6542 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6543 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006544 // - it performs additional checks that we do not do here for
6545 // performance reasons.
6546
6547 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6548 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6549 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6550
6551 // /* int32_t */ monitor = obj->monitor_
6552 __ movl(temp_reg, Address(obj, monitor_offset));
6553 if (needs_null_check) {
6554 MaybeRecordImplicitNullCheck(instruction);
6555 }
6556 // /* LockWord */ lock_word = LockWord(monitor)
6557 static_assert(sizeof(LockWord) == sizeof(int32_t),
6558 "art::LockWord and int32_t have different sizes.");
6559 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6560 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6561 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6562 static_assert(
6563 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6564 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6565
6566 // Load fence to prevent load-load reordering.
6567 // Note that this is a no-op, thanks to the x86-64 memory model.
6568 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6569
6570 // The actual reference load.
6571 // /* HeapReference<Object> */ ref = *src
6572 __ movl(ref_reg, src);
6573
6574 // Object* ref = ref_addr->AsMirrorPtr()
6575 __ MaybeUnpoisonHeapReference(ref_reg);
6576
6577 // Slow path used to mark the object `ref` when it is gray.
6578 SlowPathCode* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01006579 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006580 AddSlowPath(slow_path);
6581
6582 // if (rb_state == ReadBarrier::gray_ptr_)
6583 // ref = ReadBarrier::Mark(ref);
6584 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6585 __ j(kEqual, slow_path->GetEntryLabel());
6586 __ Bind(slow_path->GetExitLabel());
6587}
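// The lock word handling above boils down to the following check, written here as a
// C++-style sketch using the same constants (the concrete shift/mask values are
// defined by art::LockWord and not repeated here):
//   uint32_t rb_state = (obj->monitor_ >> LockWord::kReadBarrierStateShift) &
//                       LockWord::kReadBarrierStateMask;
//   if (rb_state == ReadBarrier::gray_ptr_) {
//     ref = ReadBarrier::Mark(ref);  // taken via ReadBarrierMarkSlowPathX86_64
//   }
// Only gray objects pay for the slow path; for non-gray objects the cost is the extra
// monitor load, the shift/mask, and a non-taken branch.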
6588
6589void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6590 Location out,
6591 Location ref,
6592 Location obj,
6593 uint32_t offset,
6594 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006595 DCHECK(kEmitCompilerReadBarrier);
6596
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006597 // Insert a slow path based read barrier *after* the reference load.
6598 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006599 // If heap poisoning is enabled, the unpoisoning of the loaded
6600 // reference will be carried out by the runtime within the slow
6601 // path.
6602 //
6603 // Note that `ref` currently does not get unpoisoned (when heap
6604 // poisoning is enabled), which is alright as the `ref` argument is
6605 // not used by the artReadBarrierSlow entry point.
6606 //
6607 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6608 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6609 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6610 AddSlowPath(slow_path);
6611
Roland Levillain0d5a2812015-11-13 10:07:31 +00006612 __ jmp(slow_path->GetEntryLabel());
6613 __ Bind(slow_path->GetExitLabel());
6614}
6615
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006616void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6617 Location out,
6618 Location ref,
6619 Location obj,
6620 uint32_t offset,
6621 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006622 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006623 // Baker's read barriers shall be handled by the fast path
6624 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6625 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006626 // If heap poisoning is enabled, unpoisoning will be taken care of
6627 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006628 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006629 } else if (kPoisonHeapReferences) {
6630 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6631 }
6632}
6633
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006634void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6635 Location out,
6636 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006637 DCHECK(kEmitCompilerReadBarrier);
6638
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006639 // Insert a slow path based read barrier *after* the GC root load.
6640 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006641 // Note that GC roots are not affected by heap poisoning, so we do
6642 // not need to do anything special for this here.
6643 SlowPathCode* slow_path =
6644 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6645 AddSlowPath(slow_path);
6646
Roland Levillain0d5a2812015-11-13 10:07:31 +00006647 __ jmp(slow_path->GetEntryLabel());
6648 __ Bind(slow_path->GetExitLabel());
6649}
6650
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006651void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006652 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006653 LOG(FATAL) << "Unreachable";
6654}
6655
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006656void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006657 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006658 LOG(FATAL) << "Unreachable";
6659}
6660
Mark Mendellfe57faa2015-09-18 09:26:15 -04006661// Simple implementation of packed switch - generate cascaded compare/jumps.
6662void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6663 LocationSummary* locations =
6664 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6665 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006666 locations->AddTemp(Location::RequiresRegister());
6667 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006668}
6669
6670void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6671 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006672 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006673 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006674 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6675 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6676 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006677 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6678
6679 // Should we generate smaller inline compare/jumps?
6680 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6681 // Figure out the correct compare values and jump conditions.
6682 // Handle the first compare/branch as a special case because it might
6683 // jump to the default case.
6684 DCHECK_GT(num_entries, 2u);
6685 Condition first_condition;
6686 uint32_t index;
6687 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6688 if (lower_bound != 0) {
6689 first_condition = kLess;
6690 __ cmpl(value_reg_in, Immediate(lower_bound));
6691 __ j(first_condition, codegen_->GetLabelOf(default_block));
6692 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6693
6694 index = 1;
6695 } else {
6696 // Handle all the compare/jumps below.
6697 first_condition = kBelow;
6698 index = 0;
6699 }
6700
6701 // Handle the rest of the compare/jumps.
6702 for (; index + 1 < num_entries; index += 2) {
6703 int32_t compare_to_value = lower_bound + index + 1;
6704 __ cmpl(value_reg_in, Immediate(compare_to_value));
6705 // Jump to successors[index] if value < case_value[index].
6706 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6707 // Jump to successors[index + 1] if value == case_value[index + 1].
6708 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6709 }
6710
6711 if (index != num_entries) {
6712      // There is an odd number of entries. Handle the last one.
6713 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00006714 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006715 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6716 }
6717
6718 // And the default for any other value.
6719 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6720 __ jmp(codegen_->GetLabelOf(default_block));
6721 }
6722 return;
6723 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006724
6725 // Remove the bias, if needed.
6726 Register value_reg_out = value_reg_in.AsRegister();
6727 if (lower_bound != 0) {
6728 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6729 value_reg_out = temp_reg.AsRegister();
6730 }
6731 CpuRegister value_reg(value_reg_out);
6732
6733 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006734 __ cmpl(value_reg, Immediate(num_entries - 1));
6735 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006736
Mark Mendell9c86b482015-09-18 13:36:07 -04006737 // We are in the range of the table.
6738 // Load the address of the jump table in the constant area.
6739 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006740
Mark Mendell9c86b482015-09-18 13:36:07 -04006741 // Load the (signed) offset from the jump table.
6742 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6743
6744 // Add the offset to the address of the table base.
6745 __ addq(temp_reg, base_reg);
6746
6747 // And jump.
6748 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006749}
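// Sketches of the two code shapes generated above (AT&T syntax, hypothetical
// registers; the real registers come from the location summary):
//
//   Small switch, e.g. lower_bound == 10, num_entries == 4:
//     cmpl $10, %esi ; jl default ; je case_10
//     cmpl $12, %esi ; jl case_11 ; je case_12
//     cmpl $13, %esi ; je case_13
//     jmp  default                    // omitted when default is the fallthrough block
//
//   Jump-table switch (num_entries > kPackedSwitchJumpTableThreshold):
//     leal   -10(%esi), %eax          // remove the bias; skipped when lower_bound == 0
//     cmpl   $(num_entries - 1), %eax
//     ja     default                  // unsigned compare also catches values below the bias
//     leaq   table(%rip), %rcx        // jump table lives in the constant area
//     movsxd (%rcx,%rax,4), %rdx      // signed 32-bit offset from the table base
//     addq   %rcx, %rdx
//     jmp    *%rdx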
6750
Aart Bikc5d47542016-01-27 17:00:35 -08006751void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6752 if (value == 0) {
6753 __ xorl(dest, dest);
6754 } else {
6755 __ movl(dest, Immediate(value));
6756 }
6757}
6758
Mark Mendell92e83bf2015-05-07 11:25:03 -04006759void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6760 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006761 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006762 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006763 } else if (IsUint<32>(value)) {
6764    // We can use a 32-bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006765 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6766 } else {
6767 __ movq(dest, Immediate(value));
6768 }
6769}
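// Examples of the encodings chosen above (destination register assumed to be RAX):
//   Load64BitValue(rax, 0)            ->  xorl %eax, %eax            // upper half cleared implicitly
//   Load64BitValue(rax, 0x12345678)   ->  movl $0x12345678, %eax     // zero-extends, shorter encoding
//   Load64BitValue(rax, -1)           ->  movq $-1, %rax             // not a uint32, needs the 64-bit form
//   Load64BitValue(rax, 0x123456789)  ->  movq $0x123456789, %rax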
6770
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006771void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6772 if (value == 0) {
6773 __ xorps(dest, dest);
6774 } else {
6775 __ movss(dest, LiteralInt32Address(value));
6776 }
6777}
6778
6779void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6780 if (value == 0) {
6781 __ xorpd(dest, dest);
6782 } else {
6783 __ movsd(dest, LiteralInt64Address(value));
6784 }
6785}
6786
6787void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6788 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6789}
6790
6791void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6792 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6793}
6794
Aart Bika19616e2016-02-01 18:57:58 -08006795void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6796 if (value == 0) {
6797 __ testl(dest, dest);
6798 } else {
6799 __ cmpl(dest, Immediate(value));
6800 }
6801}
6802
6803void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6804 if (IsInt<32>(value)) {
6805 if (value == 0) {
6806 __ testq(dest, dest);
6807 } else {
6808 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6809 }
6810 } else {
6811    // Value won't fit in a sign-extended 32-bit immediate.
6812 __ cmpq(dest, LiteralInt64Address(value));
6813 }
6814}
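// Corresponding examples for the 64-bit compare above (register assumed to be RDI):
//   Compare64BitValue(rdi, 0)          ->  testq %rdi, %rdi
//   Compare64BitValue(rdi, 42)         ->  cmpq  $42, %rdi           // sign-extended imm32
//   Compare64BitValue(rdi, 1LL << 40)  ->  cmpq  0x??(%rip), %rdi    // 8-byte literal in the constant area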
6815
Mark Mendellcfa410b2015-05-25 16:02:44 -04006816void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6817 DCHECK(dest.IsDoubleStackSlot());
6818 if (IsInt<32>(value)) {
6819 // Can move directly as an int32 constant.
6820 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6821 Immediate(static_cast<int32_t>(value)));
6822 } else {
6823 Load64BitValue(CpuRegister(TMP), value);
6824 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6825 }
6826}
6827
Mark Mendell9c86b482015-09-18 13:36:07 -04006828/**
6829 * Class to handle late fixup of offsets into the constant area.
6830 */
6831class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6832 public:
6833 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6834 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6835
6836 protected:
6837 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6838
6839 CodeGeneratorX86_64* codegen_;
6840
6841 private:
6842 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6843 // Patch the correct offset for the instruction. We use the address of the
6844 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6845 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6846 int32_t relative_position = constant_offset - pos;
6847
6848 // Patch in the right value.
6849 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6850 }
6851
6852 // Location in constant area that the fixup refers to.
6853 size_t offset_into_constant_area_;
6854};
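// Worked example of the Process() arithmetic above, with every number assumed purely
// for illustration: if ConstantAreaStart() is 0x400, this literal sits 0x10 bytes into
// the constant area, and the instruction using it ends at pos == 0x120, then
//   constant_offset   = 0x400 + 0x10 = 0x410
//   relative_position = 0x410 - 0x120 = 0x2f0
// and 0x2f0 overwrites the 4 placeholder bytes at [pos - 4, pos), i.e. the displacement
// field of the RIP-relative operand, since RIP-relative addressing is computed from the
// address of the next instruction.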
6855
6856/**
6857 * Class to handle late fixup of offsets to a jump table that will be created in the
6858 * constant area.
6859 */
6860class JumpTableRIPFixup : public RIPFixup {
6861 public:
6862 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6863 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6864
6865 void CreateJumpTable() {
6866 X86_64Assembler* assembler = codegen_->GetAssembler();
6867
6868 // Ensure that the reference to the jump table has the correct offset.
6869 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6870 SetOffset(offset_in_constant_table);
6871
6872 // Compute the offset from the start of the function to this jump table.
6873 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6874
6875    // Populate the jump table with the offset to each case's target block.
6876 int32_t num_entries = switch_instr_->GetNumEntries();
6877 HBasicBlock* block = switch_instr_->GetBlock();
6878 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6879 // The value that we want is the target offset - the position of the table.
6880 for (int32_t i = 0; i < num_entries; i++) {
6881 HBasicBlock* b = successors[i];
6882 Label* l = codegen_->GetLabelOf(b);
6883 DCHECK(l->IsBound());
6884 int32_t offset_to_block = l->Position() - current_table_offset;
6885 assembler->AppendInt32(offset_to_block);
6886 }
6887 }
6888
6889 private:
6890 const HPackedSwitch* switch_instr_;
6891};
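// Each AppendInt32 above stores the signed distance from the start of this jump table
// to the bound label of a successor block, so the dispatch sequence in
// VisitPackedSwitch can recover an absolute target as table_base + entry. With assumed
// numbers: a case block bound at code offset 0x80 and a table placed at offset 0x200
// yields the entry 0x80 - 0x200 = -0x180.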
6892
Mark Mendellf55c3e02015-03-26 21:07:46 -04006893void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6894 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006895 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006896 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6897    // Align to a 4-byte boundary to reduce cache misses, as the data is 4- and 8-byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006898 assembler->Align(4, 0);
6899 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006900
6901 // Populate any jump tables.
6902 for (auto jump_table : fixups_to_jump_tables_) {
6903 jump_table->CreateJumpTable();
6904 }
6905
6906 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006907 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006908 }
6909
6910 // And finish up.
6911 CodeGenerator::Finalize(allocator);
6912}
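// Rough layout of a finalized method, assuming at least one literal or jump table was
// requested (otherwise the constant area is simply omitted):
//
//   +-------------------+---------------------+-------------------+-------------+
//   | instructions      | padding to 4 bytes  | literal constants | jump tables |
//   +-------------------+---------------------+-------------------+-------------+
//                                             ^ constant_area_start_
//
// constant_area_start_ is what RIPFixup::Process() uses to turn constant-area offsets
// into RIP-relative displacements.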
6913
Mark Mendellf55c3e02015-03-26 21:07:46 -04006914Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6915 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6916 return Address::RIP(fixup);
6917}
6918
6919Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6920 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6921 return Address::RIP(fixup);
6922}
6923
6924Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6925 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6926 return Address::RIP(fixup);
6927}
6928
6929Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6930 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6931 return Address::RIP(fixup);
6932}
6933
Andreas Gampe85b62f22015-09-09 13:15:38 -07006934// TODO: trg as memory.
6935void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6936 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006937 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006938 return;
6939 }
6940
6941 DCHECK_NE(type, Primitive::kPrimVoid);
6942
6943 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6944 if (trg.Equals(return_loc)) {
6945 return;
6946 }
6947
6948 // Let the parallel move resolver take care of all of this.
6949 HParallelMove parallel_move(GetGraph()->GetArena());
6950 parallel_move.AddMove(return_loc, trg, type, nullptr);
6951 GetMoveResolver()->EmitNativeCode(&parallel_move);
6952}
6953
Mark Mendell9c86b482015-09-18 13:36:07 -04006954Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6955 // Create a fixup to be used to create and address the jump table.
6956 JumpTableRIPFixup* table_fixup =
6957 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6958
6959 // We have to populate the jump tables.
6960 fixups_to_jump_tables_.push_back(table_fixup);
6961 return Address::RIP(table_fixup);
6962}
6963
Mark Mendellea5af682015-10-22 17:35:49 -04006964void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6965 const Address& addr_high,
6966 int64_t v,
6967 HInstruction* instruction) {
6968 if (IsInt<32>(v)) {
6969 int32_t v_32 = v;
6970 __ movq(addr_low, Immediate(v_32));
6971 MaybeRecordImplicitNullCheck(instruction);
6972 } else {
6973    // Didn't fit in a sign-extended 32-bit immediate. Do it in two 32-bit pieces.
6974 int32_t low_v = Low32Bits(v);
6975 int32_t high_v = High32Bits(v);
6976 __ movl(addr_low, Immediate(low_v));
6977 MaybeRecordImplicitNullCheck(instruction);
6978 __ movl(addr_high, Immediate(high_v));
6979 }
6980}
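// Only the store of the low half is wrapped in MaybeRecordImplicitNullCheck above;
// that should be enough because a null base faults on the very first access. A sketch
// of the split path for an assumed value (the actual addresses come from the caller):
//   MoveInt64ToAddress(addr, addr + 4, 0x0000000100000000, instr)
//     movl $0x00000000, addr       // Low32Bits
//     movl $0x00000001, addr + 4   // High32Bits
// Values that fit in a sign-extended imm32 take the single movq path instead.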
6981
Roland Levillain4d027112015-07-01 15:41:14 +01006982#undef __
6983
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006984} // namespace x86_64
6985} // namespace art