/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

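// Bit 10 (the C2 condition flag) of the x87 FPU status word.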
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    x86_64_codegen->InvokeRuntime(entry_point_offset,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, size_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location obj)
      : SlowPathCode(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg = obj_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(reg, RSP);
    DCHECK(0 <= reg && reg < kNumberOfCpuRegisters) << reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- obj
    //   RAX <- ReadBarrierMark(RDI)
    //   obj <- RAX
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT

inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default:      break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
}

void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
  type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
  __ Bind(&type_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
  for (const TypePatchInfo<Label>& info : type_patches_) {
    // These are always PC-relative, see GetSupportedLoadClassKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset,
                                                             &info.dex_file,
                                                             info.label.Position(),
                                                             info.type_index));
  }
}

Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001002void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001003 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001004}
1005
1006void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001007 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001008}
1009
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001010size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1011 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1012 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001013}
1014
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001015size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1016 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1017 return kX86_64WordSize;
1018}
1019
1020size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1021 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1022 return kX86_64WordSize;
1023}
1024
1025size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1026 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1027 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001028}
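
// Illustrative sketch, not part of the original file: how a caller (for example the
// shared slow-path register saving code) could drive these hooks, advancing the stack
// index by each call's return value. The loop below is a hypothetical driver, not the
// actual implementation in the shared CodeGenerator code.
static size_t SpillCoreRegistersForIllustration(CodeGeneratorX86_64* codegen,
                                                const uint32_t* reg_ids,
                                                size_t count,
                                                size_t first_stack_index) {
  size_t stack_index = first_stack_index;
  for (size_t i = 0; i < count; ++i) {
    // Each core or XMM spill occupies one 8-byte slot (kX86_64WordSize) on x86-64.
    stack_index += codegen->SaveCoreRegister(stack_index, reg_ids[i]);
  }
  return stack_index - first_stack_index;  // total bytes used by the spills
}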
1029
Calin Juravle175dc732015-08-25 15:42:32 +01001030void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1031 HInstruction* instruction,
1032 uint32_t dex_pc,
1033 SlowPathCode* slow_path) {
Andreas Gampe542451c2016-07-26 09:02:02 -07001034 InvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value(),
Calin Juravle175dc732015-08-25 15:42:32 +01001035 instruction,
1036 dex_pc,
1037 slow_path);
1038}
1039
1040void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
Alexandre Rames8158f282015-08-07 10:26:17 +01001041 HInstruction* instruction,
1042 uint32_t dex_pc,
1043 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001044 ValidateInvokeRuntime(instruction, slow_path);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001045 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
Alexandre Rames8158f282015-08-07 10:26:17 +01001046 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames8158f282015-08-07 10:26:17 +01001047}
1048
Roland Levillaindec8f632016-07-22 17:10:06 +01001049void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1050 HInstruction* instruction,
1051 SlowPathCode* slow_path) {
1052 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
1053 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
1054}
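
// Illustrative sketch, not part of the original file: a plain-C++ analogue of the
// gs-relative dispatch above. The runtime keeps a table of entrypoint function
// pointers at fixed offsets inside the per-thread Thread object, and on x86-64 the
// GS segment base points at that object, so a single `call gs:[offset]` reaches the
// entrypoint without first loading the Thread pointer. Types and names below are fake.
struct FakeThreadForIllustration {
  char other_state[512];       // stand-in for the rest of Thread
  void (*entrypoints[64])();   // stand-in for the quick entrypoint table
};
static void CallEntrypointByOffsetForIllustration(FakeThreadForIllustration* self,
                                                  int32_t entry_point_offset) {
  // Equivalent of gs:[entry_point_offset]: read the function pointer stored at that
  // byte offset from the thread base, then call through it.
  void (*entrypoint)() =
      *reinterpret_cast<void (**)()>(reinterpret_cast<char*>(self) + entry_point_offset);
  entrypoint();
}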
1055
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001056static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001057// Use a fake return address register to mimic Quick.
1058static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
Mark Mendellfb8d2792015-03-31 22:16:59 -04001059CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001060 const X86_64InstructionSetFeatures& isa_features,
1061 const CompilerOptions& compiler_options,
1062 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +00001063 : CodeGenerator(graph,
1064 kNumberOfCpuRegisters,
1065 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001066 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001067 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1068 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001069 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001070 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1071 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001072 compiler_options,
1073 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001074 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001075 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001076 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -04001077 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001078 assembler_(graph->GetArena()),
Mark Mendellf55c3e02015-03-26 21:07:46 -04001079 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +00001080 constant_area_start_(0),
Vladimir Marko5233f932015-09-29 19:01:15 +01001081 method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1082 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0f7dca42015-11-02 14:36:43 +00001083 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001084 simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1085 string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001086 type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Mark Mendell9c86b482015-09-18 13:36:07 -04001087 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001088 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
1089}
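
// Illustrative sketch, not part of the original file: what the core callee-save mask
// passed to the base class amounts to. A plausible equivalent of ComputeRegisterMask
// (the real helper lives in the shared CodeGenerator code) ORs one bit per register
// id, and the fake return address register is added on top so the frame layout
// mirrors Quick's.
static uint32_t MaskOfRegistersForIllustration(const int* reg_ids, size_t count) {
  uint32_t mask = 0u;
  for (size_t i = 0; i < count; ++i) {
    mask |= 1u << reg_ids[i];
  }
  return mask;
}
// e.g. MaskOfRegistersForIllustration(reinterpret_cast<const int*>(kCoreCalleeSaves),
//                                     arraysize(kCoreCalleeSaves)) | (1 << kFakeReturnRegister)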
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001090
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001091InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1092 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001093 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001094 assembler_(codegen->GetAssembler()),
1095 codegen_(codegen) {}
1096
David Brazdil58282f42016-01-14 12:45:10 +00001097void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001098 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001099 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001100
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001101 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001102 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001103}
1104
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001105static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001106 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001107}
David Srbecky9d8606d2015-04-12 09:35:32 +01001108
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001109static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001110 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001111}
1112
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001113void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001114 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001115 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001116 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001117 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001118 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001119
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001120 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001121 __ testq(CpuRegister(RAX), Address(
1122 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001123 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001124 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001125
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001126 if (HasEmptyFrame()) {
1127 return;
1128 }
1129
Nicolas Geoffray98893962015-01-21 12:32:32 +00001130 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001131 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001132 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001133 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001134 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1135 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001136 }
1137 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001138
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001139 int adjust = GetFrameSize() - GetCoreSpillSize();
1140 __ subq(CpuRegister(RSP), Immediate(adjust));
1141 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001142 uint32_t xmm_spill_location = GetFpuSpillStart();
1143 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001144
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001145 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1146 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001147 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1148 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1149 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001150 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001151 }
1152
Mathieu Chartiere401d142015-04-22 13:56:20 -07001153 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001154 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001155}
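
// Illustrative sketch, not part of the original file: the explicit stack adjustment
// above only covers what the pushq sequence has not already reserved. The numbers in
// the comment are a hypothetical example, not taken from a real method.
static int32_t ExplicitFrameAdjustmentForIllustration(int32_t frame_size,
                                                      int32_t core_spill_size) {
  // E.g. frame_size = 96 with two saved core registers (core_spill_size = 16):
  // the two pushq already moved RSP down by 16, so subq reserves the remaining 80.
  return frame_size - core_spill_size;
}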
1156
1157void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001158 __ cfi().RememberState();
1159 if (!HasEmptyFrame()) {
1160 uint32_t xmm_spill_location = GetFpuSpillStart();
1161 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1162 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1163 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1164 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1165 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1166 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1167 }
1168 }
1169
1170 int adjust = GetFrameSize() - GetCoreSpillSize();
1171 __ addq(CpuRegister(RSP), Immediate(adjust));
1172 __ cfi().AdjustCFAOffset(-adjust);
1173
1174 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1175 Register reg = kCoreCalleeSaves[i];
1176 if (allocated_registers_.ContainsCoreRegister(reg)) {
1177 __ popq(CpuRegister(reg));
1178 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1179 __ cfi().Restore(DWARFReg(reg));
1180 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001181 }
1182 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001183 __ ret();
1184 __ cfi().RestoreState();
1185 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001186}
1187
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001188void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1189 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001190}
1191
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001192void CodeGeneratorX86_64::Move(Location destination, Location source) {
1193 if (source.Equals(destination)) {
1194 return;
1195 }
1196 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001197 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001198 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001199 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001200 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001201 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001202 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001203 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1204 } else if (source.IsConstant()) {
1205 HConstant* constant = source.GetConstant();
1206 if (constant->IsLongConstant()) {
1207 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1208 } else {
1209 Load32BitValue(dest, GetInt32ValueOf(constant));
1210 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001211 } else {
1212 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001213 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001214 }
1215 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001216 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001217 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001218 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001219 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001220 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1221 } else if (source.IsConstant()) {
1222 HConstant* constant = source.GetConstant();
1223 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1224 if (constant->IsFloatConstant()) {
1225 Load32BitValue(dest, static_cast<int32_t>(value));
1226 } else {
1227 Load64BitValue(dest, value);
1228 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001229 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001230 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001231 } else {
1232 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001233 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001234 }
1235 } else if (destination.IsStackSlot()) {
1236 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001237 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001238 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001239 } else if (source.IsFpuRegister()) {
1240 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001241 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001242 } else if (source.IsConstant()) {
1243 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001244 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001245 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001246 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001247 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001248 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1249 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001250 }
1251 } else {
1252 DCHECK(destination.IsDoubleStackSlot());
1253 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001254 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001255 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001256 } else if (source.IsFpuRegister()) {
1257 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001258 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001259 } else if (source.IsConstant()) {
1260 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001261 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001262 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001263 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001264 } else {
1265 DCHECK(constant->IsLongConstant());
1266 value = constant->AsLongConstant()->GetValue();
1267 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001268 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001269 } else {
1270 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001271 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1272 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001273 }
1274 }
1275}
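
// Illustrative sketch, not part of the original file: the movd used above for
// GPR <-> XMM transfers copies the raw bit pattern; it does not convert the value.
// A plain-C++ analogue of the 32-bit case (assumes <cstring> for std::memcpy):
static int32_t FloatBitsForIllustration(float value) {
  int32_t bits = 0;
  static_assert(sizeof(bits) == sizeof(value), "movd moves exactly 32 bits");
  std::memcpy(&bits, &value, sizeof(bits));  // reinterpretation, not an int<->float conversion
  return bits;
}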
1276
Calin Juravle175dc732015-08-25 15:42:32 +01001277void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1278 DCHECK(location.IsRegister());
1279 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1280}
1281
Calin Juravlee460d1d2015-09-29 04:52:17 +01001282void CodeGeneratorX86_64::MoveLocation(
1283 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1284 Move(dst, src);
1285}
1286
1287void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1288 if (location.IsRegister()) {
1289 locations->AddTemp(location);
1290 } else {
1291 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1292 }
1293}
1294
David Brazdilfc6a86a2015-06-26 10:33:45 +00001295void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001296 DCHECK(!successor->IsExitBlock());
1297
1298 HBasicBlock* block = got->GetBlock();
1299 HInstruction* previous = got->GetPrevious();
1300
1301 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001302 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001303 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1304 return;
1305 }
1306
1307 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1308 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1309 }
1310 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001311 __ jmp(codegen_->GetLabelOf(successor));
1312 }
1313}
1314
David Brazdilfc6a86a2015-06-26 10:33:45 +00001315void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1316 got->SetLocations(nullptr);
1317}
1318
1319void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1320 HandleGoto(got, got->GetSuccessor());
1321}
1322
1323void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1324 try_boundary->SetLocations(nullptr);
1325}
1326
1327void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1328 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1329 if (!successor->IsExitBlock()) {
1330 HandleGoto(try_boundary, successor);
1331 }
1332}
1333
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001334void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1335 exit->SetLocations(nullptr);
1336}
1337
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001338void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001339}
1340
Mark Mendell152408f2015-12-31 12:28:50 -05001341template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001342void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001343 LabelType* true_label,
1344 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001345 if (cond->IsFPConditionTrueIfNaN()) {
1346 __ j(kUnordered, true_label);
1347 } else if (cond->IsFPConditionFalseIfNaN()) {
1348 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001349 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001350 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001351}
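
// Illustrative sketch, not part of the original file: what the NaN handling above
// computes, written as plain C++. ucomiss/ucomisd report a comparison involving NaN
// as "unordered", so the unordered outcome has to be routed to whichever target the
// HIR condition expects for NaN before the ordered jump is taken.
static bool EmulateFPJumpForIllustration(float lhs,
                                         float rhs,
                                         bool true_if_nan,
                                         bool (*ordered_compare)(float, float)) {
  const bool unordered = (lhs != lhs) || (rhs != rhs);  // NaN compares unequal to itself
  if (unordered) {
    return true_if_nan;  // e.g. HNotEqual is true when an input is NaN, HEqual is false
  }
  return ordered_compare(lhs, rhs);
}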
1352
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001353void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001354 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001355
Mark Mendellc4701932015-04-10 13:18:51 -04001356 Location left = locations->InAt(0);
1357 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001358 Primitive::Type type = condition->InputAt(0)->GetType();
1359 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001360 case Primitive::kPrimBoolean:
1361 case Primitive::kPrimByte:
1362 case Primitive::kPrimChar:
1363 case Primitive::kPrimShort:
1364 case Primitive::kPrimInt:
1365 case Primitive::kPrimNot: {
1366 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1367 if (right.IsConstant()) {
1368 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1369 if (value == 0) {
1370 __ testl(left_reg, left_reg);
1371 } else {
1372 __ cmpl(left_reg, Immediate(value));
1373 }
1374 } else if (right.IsStackSlot()) {
1375 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1376 } else {
1377 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1378 }
1379 break;
1380 }
Mark Mendellc4701932015-04-10 13:18:51 -04001381 case Primitive::kPrimLong: {
1382 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1383 if (right.IsConstant()) {
1384 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001385 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001386 } else if (right.IsDoubleStackSlot()) {
1387 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1388 } else {
1389 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1390 }
Mark Mendellc4701932015-04-10 13:18:51 -04001391 break;
1392 }
1393 case Primitive::kPrimFloat: {
1394 if (right.IsFpuRegister()) {
1395 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1396 } else if (right.IsConstant()) {
1397 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1398 codegen_->LiteralFloatAddress(
1399 right.GetConstant()->AsFloatConstant()->GetValue()));
1400 } else {
1401 DCHECK(right.IsStackSlot());
1402 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1403 Address(CpuRegister(RSP), right.GetStackIndex()));
1404 }
Mark Mendellc4701932015-04-10 13:18:51 -04001405 break;
1406 }
1407 case Primitive::kPrimDouble: {
1408 if (right.IsFpuRegister()) {
1409 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1410 } else if (right.IsConstant()) {
1411 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1412 codegen_->LiteralDoubleAddress(
1413 right.GetConstant()->AsDoubleConstant()->GetValue()));
1414 } else {
1415 DCHECK(right.IsDoubleStackSlot());
1416 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1417 Address(CpuRegister(RSP), right.GetStackIndex()));
1418 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001419 break;
1420 }
1421 default:
1422 LOG(FATAL) << "Unexpected condition type " << type;
1423 }
1424}
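
// Illustrative sketch, not part of the original file: why a compare against the
// constant 0 is emitted as testl above. testl reg, reg sets ZF/SF/CF/OF exactly as
// cmpl reg, $0 would, in a shorter encoding, so every condition code a zero-compare
// needs is still available. A hypothetical selector for the 32-bit integer case:
static const char* IntCompareMnemonicForIllustration(bool rhs_is_constant,
                                                     int32_t rhs_constant,
                                                     bool rhs_is_stack_slot) {
  if (rhs_is_constant) {
    return (rhs_constant == 0) ? "testl reg, reg" : "cmpl reg, $imm";
  }
  return rhs_is_stack_slot ? "cmpl reg, [rsp + offset]" : "cmpl reg, reg";
}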
1425
1426template<class LabelType>
1427void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1428 LabelType* true_target_in,
1429 LabelType* false_target_in) {
1430 // Generated branching requires both targets to be explicit. If either of the
1431  // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1432 LabelType fallthrough_target;
1433 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1434 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1435
1436 // Generate the comparison to set the CC.
1437 GenerateCompareTest(condition);
1438
1439 // Now generate the correct jump(s).
1440 Primitive::Type type = condition->InputAt(0)->GetType();
1441 switch (type) {
1442 case Primitive::kPrimLong: {
1443 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1444 break;
1445 }
1446 case Primitive::kPrimFloat: {
1447 GenerateFPJumps(condition, true_target, false_target);
1448 break;
1449 }
1450 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001451 GenerateFPJumps(condition, true_target, false_target);
1452 break;
1453 }
1454 default:
1455 LOG(FATAL) << "Unexpected condition type " << type;
1456 }
1457
David Brazdil0debae72015-11-12 18:37:00 +00001458 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001459 __ jmp(false_target);
1460 }
David Brazdil0debae72015-11-12 18:37:00 +00001461
1462 if (fallthrough_target.IsLinked()) {
1463 __ Bind(&fallthrough_target);
1464 }
Mark Mendellc4701932015-04-10 13:18:51 -04001465}
1466
David Brazdil0debae72015-11-12 18:37:00 +00001467static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1468  // Moves may affect the eflags register (move zero uses xorl), so the eflags can
1469  // only be relied on if the condition is emitted strictly before `branch`. We also
1470  // can't reuse the eflags of materialized floating-point conditions, due to their complex branching.
1471 return cond->IsCondition() &&
1472 cond->GetNext() == branch &&
1473 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1474}
1475
Mark Mendell152408f2015-12-31 12:28:50 -05001476template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001477void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001478 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001479 LabelType* true_target,
1480 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001481 HInstruction* cond = instruction->InputAt(condition_input_index);
1482
1483 if (true_target == nullptr && false_target == nullptr) {
1484 // Nothing to do. The code always falls through.
1485 return;
1486 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001487 // Constant condition, statically compared against "true" (integer value 1).
1488 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001489 if (true_target != nullptr) {
1490 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001491 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001492 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001493 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001494 if (false_target != nullptr) {
1495 __ jmp(false_target);
1496 }
1497 }
1498 return;
1499 }
1500
1501 // The following code generates these patterns:
1502 // (1) true_target == nullptr && false_target != nullptr
1503 // - opposite condition true => branch to false_target
1504 // (2) true_target != nullptr && false_target == nullptr
1505 // - condition true => branch to true_target
1506 // (3) true_target != nullptr && false_target != nullptr
1507 // - condition true => branch to true_target
1508 // - branch to false_target
1509 if (IsBooleanValueOrMaterializedCondition(cond)) {
1510 if (AreEflagsSetFrom(cond, instruction)) {
1511 if (true_target == nullptr) {
1512 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1513 } else {
1514 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1515 }
1516 } else {
1517 // Materialized condition, compare against 0.
1518 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1519 if (lhs.IsRegister()) {
1520 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1521 } else {
1522 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1523 }
1524 if (true_target == nullptr) {
1525 __ j(kEqual, false_target);
1526 } else {
1527 __ j(kNotEqual, true_target);
1528 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001529 }
1530 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001531 // Condition has not been materialized, use its inputs as the
1532 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001533 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001534
David Brazdil0debae72015-11-12 18:37:00 +00001535 // If this is a long or FP comparison that has been folded into
1536 // the HCondition, generate the comparison directly.
1537 Primitive::Type type = condition->InputAt(0)->GetType();
1538 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1539 GenerateCompareTestAndBranch(condition, true_target, false_target);
1540 return;
1541 }
1542
1543 Location lhs = condition->GetLocations()->InAt(0);
1544 Location rhs = condition->GetLocations()->InAt(1);
1545 if (rhs.IsRegister()) {
1546 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1547 } else if (rhs.IsConstant()) {
1548 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001549 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001550 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001551 __ cmpl(lhs.AsRegister<CpuRegister>(),
1552 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1553 }
1554 if (true_target == nullptr) {
1555 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1556 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001557 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001558 }
Dave Allison20dfc792014-06-16 20:44:29 -07001559 }
David Brazdil0debae72015-11-12 18:37:00 +00001560
1561 // If neither branch falls through (case 3), the conditional branch to `true_target`
1562 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1563 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001564 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001565 }
1566}
1567
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001568void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001569 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1570 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001571 locations->SetInAt(0, Location::Any());
1572 }
1573}
1574
1575void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001576 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1577 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1578 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1579 nullptr : codegen_->GetLabelOf(true_successor);
1580 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1581 nullptr : codegen_->GetLabelOf(false_successor);
1582 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001583}
1584
1585void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1586 LocationSummary* locations = new (GetGraph()->GetArena())
1587 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001588 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001589 locations->SetInAt(0, Location::Any());
1590 }
1591}
1592
1593void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001594 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001595 GenerateTestAndBranch<Label>(deoptimize,
1596 /* condition_input_index */ 0,
1597 slow_path->GetEntryLabel(),
1598 /* false_target */ nullptr);
1599}
1600
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001601static bool SelectCanUseCMOV(HSelect* select) {
1602 // There are no conditional move instructions for XMMs.
1603 if (Primitive::IsFloatingPointType(select->GetType())) {
1604 return false;
1605 }
1606
1607 // A FP condition doesn't generate the single CC that we need.
1608 HInstruction* condition = select->GetCondition();
1609 if (condition->IsCondition() &&
1610 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1611 return false;
1612 }
1613
1614 // We can generate a CMOV for this Select.
1615 return true;
1616}
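
// Illustrative sketch, not part of the original file: the same decision over plain
// data, with hypothetical types. cmov has no XMM-destination form, and a floating
// point compare does not leave a single condition code that cmovcc could test.
struct SelectShapeForIllustration {
  bool result_is_fp;
  bool condition_inputs_are_fp;
};
static bool CanUseCmovForIllustration(const SelectShapeForIllustration& shape) {
  return !shape.result_is_fp && !shape.condition_inputs_are_fp;
}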
1617
David Brazdil74eb1b22015-12-14 11:44:01 +00001618void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1619 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1620 if (Primitive::IsFloatingPointType(select->GetType())) {
1621 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001622 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001623 } else {
1624 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001625 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001626 if (select->InputAt(1)->IsConstant()) {
1627 locations->SetInAt(1, Location::RequiresRegister());
1628 } else {
1629 locations->SetInAt(1, Location::Any());
1630 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001631 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001632 locations->SetInAt(1, Location::Any());
1633 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001634 }
1635 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1636 locations->SetInAt(2, Location::RequiresRegister());
1637 }
1638 locations->SetOut(Location::SameAsFirstInput());
1639}
1640
1641void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1642 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001643 if (SelectCanUseCMOV(select)) {
1644 // If both the condition and the source types are integer, we can generate
1645 // a CMOV to implement Select.
1646 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001647 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001648 DCHECK(locations->InAt(0).Equals(locations->Out()));
1649
1650 HInstruction* select_condition = select->GetCondition();
1651 Condition cond = kNotEqual;
1652
1653 // Figure out how to test the 'condition'.
1654 if (select_condition->IsCondition()) {
1655 HCondition* condition = select_condition->AsCondition();
1656 if (!condition->IsEmittedAtUseSite()) {
1657 // This was a previously materialized condition.
1658 // Can we use the existing condition code?
1659 if (AreEflagsSetFrom(condition, select)) {
1660 // Materialization was the previous instruction. Condition codes are right.
1661 cond = X86_64IntegerCondition(condition->GetCondition());
1662 } else {
1663 // No, we have to recreate the condition code.
1664 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1665 __ testl(cond_reg, cond_reg);
1666 }
1667 } else {
1668 GenerateCompareTest(condition);
1669 cond = X86_64IntegerCondition(condition->GetCondition());
1670 }
1671 } else {
1672 // Must be a boolean condition, which needs to be compared to 0.
1673 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1674 __ testl(cond_reg, cond_reg);
1675 }
1676
1677 // If the condition is true, overwrite the output, which already contains false.
1678 // Generate the correct sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001679 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1680 if (value_true_loc.IsRegister()) {
1681 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1682 } else {
1683 __ cmov(cond,
1684 value_false,
1685 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1686 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001687 } else {
1688 NearLabel false_target;
1689 GenerateTestAndBranch<NearLabel>(select,
1690 /* condition_input_index */ 2,
1691 /* true_target */ nullptr,
1692 &false_target);
1693 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1694 __ Bind(&false_target);
1695 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001696}
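
// Illustrative sketch, not part of the original file: the effect of the CMOV path
// above, written as plain C++. The output register starts out holding the false
// value (Location::SameAsFirstInput), and cmovcc overwrites it only when the
// condition holds, so no branch appears in the generated code.
static int64_t EmulateCmovSelectForIllustration(bool condition,
                                                int64_t value_true,
                                                int64_t value_false) {
  int64_t out = value_false;  // out already contains the false value
  if (condition) {
    out = value_true;         // what cmovcc does, without a branch
  }
  return out;
}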
1697
David Srbecky0cf44932015-12-09 14:09:59 +00001698void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1699 new (GetGraph()->GetArena()) LocationSummary(info);
1700}
1701
David Srbeckyd28f4a02016-03-14 17:14:24 +00001702void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1703 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001704}
1705
1706void CodeGeneratorX86_64::GenerateNop() {
1707 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001708}
1709
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001710void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001711 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001712 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001713 // Handle the long/FP comparisons made in instruction simplification.
1714 switch (cond->InputAt(0)->GetType()) {
1715 case Primitive::kPrimLong:
1716 locations->SetInAt(0, Location::RequiresRegister());
1717 locations->SetInAt(1, Location::Any());
1718 break;
1719 case Primitive::kPrimFloat:
1720 case Primitive::kPrimDouble:
1721 locations->SetInAt(0, Location::RequiresFpuRegister());
1722 locations->SetInAt(1, Location::Any());
1723 break;
1724 default:
1725 locations->SetInAt(0, Location::RequiresRegister());
1726 locations->SetInAt(1, Location::Any());
1727 break;
1728 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001729 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001730 locations->SetOut(Location::RequiresRegister());
1731 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001732}
1733
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001734void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001735 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001736 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001737 }
Mark Mendellc4701932015-04-10 13:18:51 -04001738
1739 LocationSummary* locations = cond->GetLocations();
1740 Location lhs = locations->InAt(0);
1741 Location rhs = locations->InAt(1);
1742 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001743 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001744
1745 switch (cond->InputAt(0)->GetType()) {
1746 default:
1747 // Integer case.
1748
1749 // Clear output register: setcc only sets the low byte.
1750 __ xorl(reg, reg);
1751
1752 if (rhs.IsRegister()) {
1753 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1754 } else if (rhs.IsConstant()) {
1755 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001756 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001757 } else {
1758 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1759 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001760 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001761 return;
1762 case Primitive::kPrimLong:
1763 // Clear output register: setcc only sets the low byte.
1764 __ xorl(reg, reg);
1765
1766 if (rhs.IsRegister()) {
1767 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1768 } else if (rhs.IsConstant()) {
1769 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001770 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001771 } else {
1772 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1773 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001774 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001775 return;
1776 case Primitive::kPrimFloat: {
1777 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1778 if (rhs.IsConstant()) {
1779 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1780 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1781 } else if (rhs.IsStackSlot()) {
1782 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1783 } else {
1784 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1785 }
1786 GenerateFPJumps(cond, &true_label, &false_label);
1787 break;
1788 }
1789 case Primitive::kPrimDouble: {
1790 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1791 if (rhs.IsConstant()) {
1792 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1793 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1794 } else if (rhs.IsDoubleStackSlot()) {
1795 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1796 } else {
1797 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1798 }
1799 GenerateFPJumps(cond, &true_label, &false_label);
1800 break;
1801 }
1802 }
1803
1804 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001805 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001806
Roland Levillain4fa13f62015-07-06 18:11:54 +01001807 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001808 __ Bind(&false_label);
1809 __ xorl(reg, reg);
1810 __ jmp(&done_label);
1811
Roland Levillain4fa13f62015-07-06 18:11:54 +01001812 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001813 __ Bind(&true_label);
1814 __ movl(reg, Immediate(1));
1815 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001816}
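
// Illustrative sketch, not part of the original file: the shape of the integer
// materialization above, as plain C++. The xorl must be emitted before the compare
// because xorl clobbers the eflags, and the pre-clear is needed because setcc only
// writes the low byte of its destination register.
static int32_t EmulateMaterializedConditionForIllustration(int32_t lhs,
                                                           int32_t rhs,
                                                           bool (*compare)(int32_t, int32_t)) {
  int32_t out = 0;          // xorl reg, reg (before the compare, since it clobbers flags)
  if (compare(lhs, rhs)) {  // cmpl / testl
    out = 1;                // setcc writes 0 or 1 into the low byte only
  }
  return out;
}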
1817
1818void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001819 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001820}
1821
1822void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001823 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001824}
1825
1826void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001827 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001828}
1829
1830void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001831 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001832}
1833
1834void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001835 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001836}
1837
1838void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001839 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001840}
1841
1842void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001843 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001844}
1845
1846void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001847 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001848}
1849
1850void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001851 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001852}
1853
1854void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001855 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001856}
1857
1858void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001859 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001860}
1861
1862void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001863 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001864}
1865
Aart Bike9f37602015-10-09 11:15:55 -07001866void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001867 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001868}
1869
1870void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001871 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001872}
1873
1874void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001875 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001876}
1877
1878void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001879 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001880}
1881
1882void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001883 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001884}
1885
1886void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001887 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001888}
1889
1890void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001891 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001892}
1893
1894void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001895 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001896}
1897
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001898void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001899 LocationSummary* locations =
1900 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001901 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001902 case Primitive::kPrimBoolean:
1903 case Primitive::kPrimByte:
1904 case Primitive::kPrimShort:
1905 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001906 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001907 case Primitive::kPrimLong: {
1908 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001909 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001910 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1911 break;
1912 }
1913 case Primitive::kPrimFloat:
1914 case Primitive::kPrimDouble: {
1915 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001916 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001917 locations->SetOut(Location::RequiresRegister());
1918 break;
1919 }
1920 default:
1921 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1922 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001923}
1924
1925void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001926 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001927 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001928 Location left = locations->InAt(0);
1929 Location right = locations->InAt(1);
1930
Mark Mendell0c9497d2015-08-21 09:30:05 -04001931 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001932 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001933 Condition less_cond = kLess;
1934
Calin Juravleddb7df22014-11-25 20:56:51 +00001935 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001936 case Primitive::kPrimBoolean:
1937 case Primitive::kPrimByte:
1938 case Primitive::kPrimShort:
1939 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001940 case Primitive::kPrimInt: {
1941 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1942 if (right.IsConstant()) {
1943 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1944 codegen_->Compare32BitValue(left_reg, value);
1945 } else if (right.IsStackSlot()) {
1946 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1947 } else {
1948 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1949 }
1950 break;
1951 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001952 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001953 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1954 if (right.IsConstant()) {
1955 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001956 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001957 } else if (right.IsDoubleStackSlot()) {
1958 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001959 } else {
1960 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1961 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001962 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001963 }
1964 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001965 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1966 if (right.IsConstant()) {
1967 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1968 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1969 } else if (right.IsStackSlot()) {
1970 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1971 } else {
1972 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1973 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001974 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001975 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001976 break;
1977 }
1978 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001979 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1980 if (right.IsConstant()) {
1981 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1982 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1983 } else if (right.IsDoubleStackSlot()) {
1984 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1985 } else {
1986 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1987 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001988 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001989 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001990 break;
1991 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001992 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001993 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001994 }
Aart Bika19616e2016-02-01 18:57:58 -08001995
Calin Juravleddb7df22014-11-25 20:56:51 +00001996 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001997 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001998 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001999
Calin Juravle91debbc2014-11-26 19:01:09 +00002000 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00002001 __ movl(out, Immediate(1));
2002 __ jmp(&done);
2003
2004 __ Bind(&less);
2005 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002006
2007 __ Bind(&done);
2008}
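
// Illustrative sketch, not part of the original file: the -1/0/1 result computed
// above, as plain C++. The gt bias decides which extreme a NaN operand maps to. In
// the generated code, ucomis encodes "less than" in the carry flag rather than in
// SF/OF, so the ordered "less" check uses kBelow, and the unordered (NaN) outcome,
// which also sets CF, is branched away first according to the bias.
static int32_t EmulateFpCompareForIllustration(double lhs, double rhs, bool gt_bias) {
  const bool unordered = (lhs != lhs) || (rhs != rhs);  // NaN on either side
  if (unordered) {
    return gt_bias ? 1 : -1;  // gt bias: NaN counts as "greater"; lt bias: as "less"
  }
  if (lhs == rhs) {
    return 0;
  }
  return (lhs < rhs) ? -1 : 1;
}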
2009
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
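
// Note on the barrier above: x86-64's TSO memory model already orders
// load/load, load/store and store/store accesses, so GenerateMemoryBarrier is
// only expected to emit a real fence (mfence, or an equivalent locked RMW) for
// kAnyAny (store/load) barriers and to elide the weaker kinds. This is an
// assumption about GenerateMemoryBarrier's behaviour, not something enforced
// here.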

void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}

void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}

Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(RAX);

    case Primitive::kPrimVoid:
      return Location::NoLocation();

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      return Location::FpuRegisterLocation(XMM0);
  }

  UNREACHABLE();
}

Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}

Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
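
// Illustrative mapping (an assumption based on the standard dex calling
// convention for x86-64, where RDI carries the ArtMethod*): for an instance
// method
//   void m(int a, long b, float c, double d)
// GetNextLocation() would hand out roughly
//   receiver -> RSI, a -> RDX, b -> RCX, c -> XMM0, d -> XMM1,
// with later arguments falling back to the stack slots computed via
// GetStackOffsetOf(). The authoritative register lists live in
// InvokeDexCallingConvention, not in this comment.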

void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}
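
// The helper above is shared by the invoke code generators below: when the
// intrinsics recognizer has marked a call site as intrinsified, the dedicated
// IntrinsicCodeGeneratorX86_64 sequence is emitted and the generic dispatch
// path is skipped entirely.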
2241
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002242void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002243 // Explicit clinit checks triggered by static invokes must have been pruned by
2244 // art::PrepareForRegisterAllocation.
2245 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002246
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002247 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2248 return;
2249 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002250
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002251 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002252 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002253 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002254 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002255}
2256
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002257void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002258 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002259 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002260}
2261
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002262void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002263 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002264 if (intrinsic.TryDispatch(invoke)) {
2265 return;
2266 }
2267
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002268 HandleInvoke(invoke);
2269}
2270
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002271void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002272 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2273 return;
2274 }
2275
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002276 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002277 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002278 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002279}
2280
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002281void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2282 HandleInvoke(invoke);
2283 // Add the hidden argument.
2284 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2285}
2286
2287void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2288 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002289 LocationSummary* locations = invoke->GetLocations();
2290 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2291 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002292 Location receiver = locations->InAt(0);
2293 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2294
  // Set the hidden argument. It is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the offset of the IMT entry for this interface method.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
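
// Recap of the interface dispatch emitted above: load the receiver's class,
// follow the class's ImTable pointer, index it with invoke->GetImtIndex(), and
// call the resolved ArtMethod's quick entry point. RAX additionally carries the
// interface method's dex method index as a hidden argument, which the runtime
// can use to resolve IMT slot conflicts.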

void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimFloat: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
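
// The float/double cases above negate by flipping the IEEE-754 sign bit instead
// of subtracting from zero, which would get -0.0 and NaN payloads wrong. A
// scalar sketch of the same trick (illustrative only, not part of the codegen):
//
//   float NegateFloat(float x) {
//     uint32_t bits;
//     std::memcpy(&bits, &x, sizeof(bits));   // needs <cstring>
//     bits ^= 0x80000000u;                    // flip bit 31, the sign bit
//     std::memcpy(&x, &bits, sizeof(bits));
//     return x;
//   }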

void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The Java language does not allow treating boolean as an integral type but
  // our bit representation makes it safe.

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          // TODO: We would benefit from a (to-be-implemented)
          // Location::RegisterOrStackSlot requirement for this input.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          if (in.IsRegister()) {
            __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
            __ movsxb(out.AsRegister<CpuRegister>(),
                      Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            __ movl(out.AsRegister<CpuRegister>(),
                    Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          if (in.IsRegister()) {
            __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
            __ movsxw(out.AsRegister<CpuRegister>(),
                      Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            __ movl(out.AsRegister<CpuRegister>(),
                    Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          if (in.IsRegister()) {
            __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsDoubleStackSlot()) {
            __ movl(out.AsRegister<CpuRegister>(),
                    Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          __ movl(output, Immediate(kPrimIntMax));
          // if input >= (float)INT_MAX goto done
          __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = float-to-int-truncate(input)
          __ cvttss2si(output, input, false);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }
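
        // Note on the fixups above (and in the double/long cases below):
        // cvttss2si/cvttsd2si return the "integer indefinite" value
        // (INT_MIN/LONG_MIN) for NaN and out-of-range inputs, which already
        // matches Java's rule for negative overflow. Only two cases need
        // patching: inputs >= MAX clamp to the preloaded MAX (comiss/jae),
        // and NaN becomes 0 (the kUnordered branch).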

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          __ movl(output, Immediate(kPrimIntMax));
          // if input >= (double)INT_MAX goto done
          __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = double-to-int-truncate(input)
          __ cvttsd2si(output, input);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      DCHECK(out.IsRegister());
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(in.IsRegister());
          __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          codegen_->Load64BitValue(output, kPrimLongMax);
          // if input >= (float)LONG_MAX goto done
          __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = float-to-long-truncate(input)
          __ cvttss2si(output, input, true);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          XmmRegister input = in.AsFpuRegister<XmmRegister>();
          CpuRegister output = out.AsRegister<CpuRegister>();
          NearLabel done, nan;

          codegen_->Load64BitValue(output, kPrimLongMax);
          // if input >= (double)LONG_MAX goto done
          __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
          __ j(kAboveEqual, &done);
          // if input == NaN goto nan
          __ j(kUnordered, &nan);
          // output = double-to-long-truncate(input)
          __ cvttsd2si(output, input, true);
          __ jmp(&done);
          __ Bind(&nan);
          // output = 0
          __ xorl(output, output);
          __ Bind(&done);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          if (in.IsRegister()) {
            __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
          } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
            __ movzxw(out.AsRegister<CpuRegister>(),
                      Address(CpuRegister(RSP), in.GetStackIndex()));
          } else {
            __ movl(out.AsRegister<CpuRegister>(),
                    Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          if (in.IsRegister()) {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
          } else if (in.IsConstant()) {
            int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load32BitValue(dest, static_cast<float>(v));
          } else {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), false);
          }
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          if (in.IsRegister()) {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
          } else if (in.IsConstant()) {
            int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load32BitValue(dest, static_cast<float>(v));
          } else {
            __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), true);
          }
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          if (in.IsFpuRegister()) {
            __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
          } else if (in.IsConstant()) {
            double v = in.GetConstant()->AsDoubleConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load32BitValue(dest, static_cast<float>(v));
          } else {
            __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          if (in.IsRegister()) {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
          } else if (in.IsConstant()) {
            int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load64BitValue(dest, static_cast<double>(v));
          } else {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), false);
          }
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          if (in.IsRegister()) {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
          } else if (in.IsConstant()) {
            int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load64BitValue(dest, static_cast<double>(v));
          } else {
            __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()), true);
          }
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          if (in.IsFpuRegister()) {
            __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
          } else if (in.IsConstant()) {
            float v = in.GetConstant()->AsFloatConstant()->GetValue();
            XmmRegister dest = out.AsFpuRegister<XmmRegister>();
            codegen_->Load64BitValue(dest, static_cast<double>(v));
          } else {
            __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
                        Address(CpuRegister(RSP), in.GetStackIndex()));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // We can use a leaq or addq if the constant can fit in an immediate.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
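
// When the register allocator assigns VisitAdd an output different from both
// inputs, the code above uses leal/leaq as a non-destructive three-operand add:
// e.g. `leal out, [first + second]` (or `[first + imm]` for the constant case)
// produces the sum without clobbering either input register.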
3056
3057void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003058 LocationSummary* locations =
3059 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003060 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003061 case Primitive::kPrimInt: {
3062 locations->SetInAt(0, Location::RequiresRegister());
3063 locations->SetInAt(1, Location::Any());
3064 locations->SetOut(Location::SameAsFirstInput());
3065 break;
3066 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003067 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003068 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003069 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003070 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003071 break;
3072 }
Calin Juravle11351682014-10-23 15:38:15 +01003073 case Primitive::kPrimFloat:
3074 case Primitive::kPrimDouble: {
3075 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003076 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003077 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003078 break;
Calin Juravle11351682014-10-23 15:38:15 +01003079 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003080 default:
Calin Juravle11351682014-10-23 15:38:15 +01003081 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003082 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003083}
3084
3085void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3086 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003087 Location first = locations->InAt(0);
3088 Location second = locations->InAt(1);
3089 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003090 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003091 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003092 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003093 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003094 } else if (second.IsConstant()) {
3095 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003096 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003097 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003098 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003099 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003100 break;
3101 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003102 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003103 if (second.IsConstant()) {
3104 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3105 DCHECK(IsInt<32>(value));
3106 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3107 } else {
3108 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3109 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003110 break;
3111 }
3112
Calin Juravle11351682014-10-23 15:38:15 +01003113 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003114 if (second.IsFpuRegister()) {
3115 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3116 } else if (second.IsConstant()) {
3117 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003118 codegen_->LiteralFloatAddress(
3119 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003120 } else {
3121 DCHECK(second.IsStackSlot());
3122 __ subss(first.AsFpuRegister<XmmRegister>(),
3123 Address(CpuRegister(RSP), second.GetStackIndex()));
3124 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003125 break;
Calin Juravle11351682014-10-23 15:38:15 +01003126 }
3127
3128 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003129 if (second.IsFpuRegister()) {
3130 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3131 } else if (second.IsConstant()) {
3132 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003133 codegen_->LiteralDoubleAddress(
3134 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003135 } else {
3136 DCHECK(second.IsDoubleStackSlot());
3137 __ subsd(first.AsFpuRegister<XmmRegister>(),
3138 Address(CpuRegister(RSP), second.GetStackIndex()));
3139 }
Calin Juravle11351682014-10-23 15:38:15 +01003140 break;
3141 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003142
3143 default:
Calin Juravle11351682014-10-23 15:38:15 +01003144 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003145 }
3146}
3147
Calin Juravle34bacdf2014-10-07 20:23:36 +01003148void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3149 LocationSummary* locations =
3150 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3151 switch (mul->GetResultType()) {
3152 case Primitive::kPrimInt: {
3153 locations->SetInAt(0, Location::RequiresRegister());
3154 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003155 if (mul->InputAt(1)->IsIntConstant()) {
3156 // Can use 3 operand multiply.
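        // (The three-operand form imul reg, reg/mem, imm writes to an arbitrary destination
        // register, so the output does not have to alias the first input.)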
3157 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3158 } else {
3159 locations->SetOut(Location::SameAsFirstInput());
3160 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003161 break;
3162 }
3163 case Primitive::kPrimLong: {
3164 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003165 locations->SetInAt(1, Location::Any());
3166 if (mul->InputAt(1)->IsLongConstant() &&
3167 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003168 // Can use 3 operand multiply.
3169 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3170 } else {
3171 locations->SetOut(Location::SameAsFirstInput());
3172 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003173 break;
3174 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003175 case Primitive::kPrimFloat:
3176 case Primitive::kPrimDouble: {
3177 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003178 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003179 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003180 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003181 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003182
3183 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003184 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003185 }
3186}
3187
3188void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3189 LocationSummary* locations = mul->GetLocations();
3190 Location first = locations->InAt(0);
3191 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003192 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003193 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003194 case Primitive::kPrimInt:
3195 // The constant may have ended up in a register, so test explicitly to avoid
3196 // problems where the output may not be the same as the first operand.
3197 if (mul->InputAt(1)->IsIntConstant()) {
3198 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3199 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3200 } else if (second.IsRegister()) {
3201 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003202 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003203 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003204 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003205 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003206 __ imull(first.AsRegister<CpuRegister>(),
3207 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003208 }
3209 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003210 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003211 // The constant may have ended up in a register, so test explicitly to avoid
3212 // problems where the output may not be the same as the first operand.
3213 if (mul->InputAt(1)->IsLongConstant()) {
3214 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3215 if (IsInt<32>(value)) {
3216 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3217 Immediate(static_cast<int32_t>(value)));
3218 } else {
3219 // Have to use the constant area.
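          // (imulq accepts only a sign-extended 32-bit immediate, so the full 64-bit constant
          // is materialized in the method's constant area and used as a memory operand.)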
3220 DCHECK(first.Equals(out));
3221 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3222 }
3223 } else if (second.IsRegister()) {
3224 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003225 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003226 } else {
3227 DCHECK(second.IsDoubleStackSlot());
3228 DCHECK(first.Equals(out));
3229 __ imulq(first.AsRegister<CpuRegister>(),
3230 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003231 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003232 break;
3233 }
3234
Calin Juravleb5bfa962014-10-21 18:02:24 +01003235 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003236 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003237 if (second.IsFpuRegister()) {
3238 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3239 } else if (second.IsConstant()) {
3240 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003241 codegen_->LiteralFloatAddress(
3242 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003243 } else {
3244 DCHECK(second.IsStackSlot());
3245 __ mulss(first.AsFpuRegister<XmmRegister>(),
3246 Address(CpuRegister(RSP), second.GetStackIndex()));
3247 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003248 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003249 }
3250
3251 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003252 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003253 if (second.IsFpuRegister()) {
3254 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3255 } else if (second.IsConstant()) {
3256 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003257 codegen_->LiteralDoubleAddress(
3258 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003259 } else {
3260 DCHECK(second.IsDoubleStackSlot());
3261 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3262 Address(CpuRegister(RSP), second.GetStackIndex()));
3263 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003264 break;
3265 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003266
3267 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003268 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003269 }
3270}
3271
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003272void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3273 uint32_t stack_adjustment, bool is_float) {
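  // x87 loads (flds/fldl) can only read from memory. A value that is not already in a stack
  // slot (e.g. one held in an XMM register) is therefore written to the temporary stack slot
  // first and reloaded from there; values already on the stack are loaded directly.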
3274 if (source.IsStackSlot()) {
3275 DCHECK(is_float);
3276 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3277 } else if (source.IsDoubleStackSlot()) {
3278 DCHECK(!is_float);
3279 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3280 } else {
3281 // Write the value to the temporary location on the stack and load to FP stack.
3282 if (is_float) {
3283 Location stack_temp = Location::StackSlot(temp_offset);
3284 codegen_->Move(stack_temp, source);
3285 __ flds(Address(CpuRegister(RSP), temp_offset));
3286 } else {
3287 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3288 codegen_->Move(stack_temp, source);
3289 __ fldl(Address(CpuRegister(RSP), temp_offset));
3290 }
3291 }
3292}
3293
3294void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3295 Primitive::Type type = rem->GetResultType();
3296 bool is_float = type == Primitive::kPrimFloat;
3297 size_t elem_size = Primitive::ComponentSize(type);
3298 LocationSummary* locations = rem->GetLocations();
3299 Location first = locations->InAt(0);
3300 Location second = locations->InAt(1);
3301 Location out = locations->Out();
3302
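  // SSE has no remainder instruction, so both operands are moved onto the legacy x87 FP stack
  // and reduced with FPREM. FPREM truncates toward zero, which matches the Java % operator on
  // float/double (FPREM1 would compute the IEEE round-to-nearest remainder instead), and it
  // may reduce the exponent only partially, hence the retry loop on the C2 status flag below.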
3303 // Create stack space for 2 elements.
3304 // TODO: enhance register allocator to ask for stack temporaries.
3305 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3306
3307 // Load the values to the FP stack in reverse order, using temporaries if needed.
3308 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3309 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3310
3311 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003312 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003313 __ Bind(&retry);
3314 __ fprem();
3315
3316 // Move FP status to AX.
3317 __ fstsw();
3318
3319   // Check whether the argument reduction is complete. This is signaled by the
3320   // C2 FPU flag bit being cleared (set to 0).
3321 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3322 __ j(kNotEqual, &retry);
3323
3324 // We have settled on the final value. Retrieve it into an XMM register.
3325 // Store FP top of stack to real stack.
3326 if (is_float) {
3327 __ fsts(Address(CpuRegister(RSP), 0));
3328 } else {
3329 __ fstl(Address(CpuRegister(RSP), 0));
3330 }
3331
3332 // Pop the 2 items from the FP stack.
3333 __ fucompp();
3334
3335 // Load the value from the stack into an XMM register.
3336 DCHECK(out.IsFpuRegister()) << out;
3337 if (is_float) {
3338 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3339 } else {
3340 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3341 }
3342
3343 // And remove the temporary stack space we allocated.
3344 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3345}
3346
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003347void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3348 DCHECK(instruction->IsDiv() || instruction->IsRem());
3349
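  // Division by 1 or -1 needs no idiv: the quotient is the numerator (negated for -1) and the
  // remainder is always 0, so only a move/negate or a register clear is emitted.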
3350 LocationSummary* locations = instruction->GetLocations();
3351 Location second = locations->InAt(1);
3352 DCHECK(second.IsConstant());
3353
3354 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3355 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003356 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003357
3358 DCHECK(imm == 1 || imm == -1);
3359
3360 switch (instruction->GetResultType()) {
3361 case Primitive::kPrimInt: {
3362 if (instruction->IsRem()) {
3363 __ xorl(output_register, output_register);
3364 } else {
3365 __ movl(output_register, input_register);
3366 if (imm == -1) {
3367 __ negl(output_register);
3368 }
3369 }
3370 break;
3371 }
3372
3373 case Primitive::kPrimLong: {
3374 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003375 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003376 } else {
3377 __ movq(output_register, input_register);
3378 if (imm == -1) {
3379 __ negq(output_register);
3380 }
3381 }
3382 break;
3383 }
3384
3385 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003386 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003387 }
3388}
3389
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003390void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003391 LocationSummary* locations = instruction->GetLocations();
3392 Location second = locations->InAt(1);
3393
3394 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3395 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3396
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003397 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003398 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3399 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003400
3401 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3402
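  // Signed division must round toward zero, while an arithmetic shift alone rounds toward
  // negative infinity. The lea/test/cmov sequence below adds (abs_imm - 1) to negative
  // numerators only, so that the following sar yields the truncated quotient
  // (e.g. -7 / 4: (-7 + 3) >> 2 == -1, whereas -7 >> 2 == -2).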
3403 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003404 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003405 __ testl(numerator, numerator);
3406 __ cmov(kGreaterEqual, tmp, numerator);
3407 int shift = CTZ(imm);
3408 __ sarl(tmp, Immediate(shift));
3409
3410 if (imm < 0) {
3411 __ negl(tmp);
3412 }
3413
3414 __ movl(output_register, tmp);
3415 } else {
3416 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3417 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3418
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003419 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003420 __ addq(rdx, numerator);
3421 __ testq(numerator, numerator);
3422 __ cmov(kGreaterEqual, rdx, numerator);
3423 int shift = CTZ(imm);
3424 __ sarq(rdx, Immediate(shift));
3425
3426 if (imm < 0) {
3427 __ negq(rdx);
3428 }
3429
3430 __ movq(output_register, rdx);
3431 }
3432}
3433
3434void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3435 DCHECK(instruction->IsDiv() || instruction->IsRem());
3436
3437 LocationSummary* locations = instruction->GetLocations();
3438 Location second = locations->InAt(1);
3439
3440 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3441 : locations->GetTemp(0).AsRegister<CpuRegister>();
3442 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3443 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3444 : locations->Out().AsRegister<CpuRegister>();
3445 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3446
3447 DCHECK_EQ(RAX, eax.AsRegister());
3448 DCHECK_EQ(RDX, edx.AsRegister());
3449 if (instruction->IsDiv()) {
3450 DCHECK_EQ(RAX, out.AsRegister());
3451 } else {
3452 DCHECK_EQ(RDX, out.AsRegister());
3453 }
3454
3455 int64_t magic;
3456 int shift;
3457
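  // Division by an arbitrary constant is strength-reduced to a widening multiply by a "magic"
  // reciprocal, an arithmetic shift and a sign fix-up (the classic Hacker's Delight
  // construction used by CalculateMagicAndShiftForDivRem). For example, for a divisor of 7 the
  // expected values are magic == 0x92492493 and shift == 2; since 7 > 0 and that magic
  // constant is negative, the numerator is added back to the high half of the product before
  // the shift.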
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003458 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003459 if (instruction->GetResultType() == Primitive::kPrimInt) {
3460 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3461
3462 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3463
3464 __ movl(numerator, eax);
3465
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003466 __ movl(eax, Immediate(magic));
3467 __ imull(numerator);
3468
3469 if (imm > 0 && magic < 0) {
3470 __ addl(edx, numerator);
3471 } else if (imm < 0 && magic > 0) {
3472 __ subl(edx, numerator);
3473 }
3474
3475 if (shift != 0) {
3476 __ sarl(edx, Immediate(shift));
3477 }
3478
3479 __ movl(eax, edx);
3480 __ shrl(edx, Immediate(31));
3481 __ addl(edx, eax);
3482
3483 if (instruction->IsRem()) {
3484 __ movl(eax, numerator);
3485 __ imull(edx, Immediate(imm));
3486 __ subl(eax, edx);
3487 __ movl(edx, eax);
3488 } else {
3489 __ movl(eax, edx);
3490 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003491 } else {
3492 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3493
3494 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3495
3496 CpuRegister rax = eax;
3497 CpuRegister rdx = edx;
3498
3499 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3500
3501 // Save the numerator.
3502 __ movq(numerator, rax);
3503
3504 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003505 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003506
3507 // RDX:RAX = magic * numerator
3508 __ imulq(numerator);
3509
3510 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003511 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003512 __ addq(rdx, numerator);
3513 } else if (imm < 0 && magic > 0) {
3514 // RDX -= numerator
3515 __ subq(rdx, numerator);
3516 }
3517
3518 // Shift if needed.
3519 if (shift != 0) {
3520 __ sarq(rdx, Immediate(shift));
3521 }
3522
3523 // RDX += 1 if RDX < 0
3524 __ movq(rax, rdx);
3525 __ shrq(rdx, Immediate(63));
3526 __ addq(rdx, rax);
3527
3528 if (instruction->IsRem()) {
3529 __ movq(rax, numerator);
3530
3531 if (IsInt<32>(imm)) {
3532 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3533 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003534 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003535 }
3536
3537 __ subq(rax, rdx);
3538 __ movq(rdx, rax);
3539 } else {
3540 __ movq(rax, rdx);
3541 }
3542 }
3543}
3544
Calin Juravlebacfec32014-11-14 15:54:36 +00003545void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3546 DCHECK(instruction->IsDiv() || instruction->IsRem());
3547 Primitive::Type type = instruction->GetResultType();
3548   DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3549
3550 bool is_div = instruction->IsDiv();
3551 LocationSummary* locations = instruction->GetLocations();
3552
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003553 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3554 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003555
Roland Levillain271ab9c2014-11-27 15:23:57 +00003556 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003557 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003558
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003559 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003560 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003561
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003562 if (imm == 0) {
3563       // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3564 } else if (imm == 1 || imm == -1) {
3565 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003566 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003567 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003568 } else {
3569 DCHECK(imm <= -2 || imm >= 2);
3570 GenerateDivRemWithAnyConstant(instruction);
3571 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003572 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003573 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003574 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003575 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003576 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003577
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003578 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3579 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3580 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3581 // so it's safe to just use negl instead of more complex comparisons.
3582 if (type == Primitive::kPrimInt) {
3583 __ cmpl(second_reg, Immediate(-1));
3584 __ j(kEqual, slow_path->GetEntryLabel());
3585       // edx:eax <- sign extension of eax
3586 __ cdq();
3587 // eax = quotient, edx = remainder
3588 __ idivl(second_reg);
3589 } else {
3590 __ cmpq(second_reg, Immediate(-1));
3591 __ j(kEqual, slow_path->GetEntryLabel());
3592       // rdx:rax <- sign extension of rax
3593 __ cqo();
3594 // rax = quotient, rdx = remainder
3595 __ idivq(second_reg);
3596 }
3597 __ Bind(slow_path->GetExitLabel());
3598 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003599}
3600
Calin Juravle7c4954d2014-10-28 16:57:40 +00003601void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3602 LocationSummary* locations =
3603 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3604 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003605 case Primitive::kPrimInt:
3606 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003607 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003608 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003609 locations->SetOut(Location::SameAsFirstInput());
3610 // Intel uses edx:eax as the dividend.
3611 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003612       // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3613       // that forces its results into RAX and RDX, things are simpler if we use RDX also as an
3614       // output and request another temp.
3615 if (div->InputAt(1)->IsConstant()) {
3616 locations->AddTemp(Location::RequiresRegister());
3617 }
Calin Juravled0d48522014-11-04 16:40:20 +00003618 break;
3619 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003620
Calin Juravle7c4954d2014-10-28 16:57:40 +00003621 case Primitive::kPrimFloat:
3622 case Primitive::kPrimDouble: {
3623 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003624 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003625 locations->SetOut(Location::SameAsFirstInput());
3626 break;
3627 }
3628
3629 default:
3630 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3631 }
3632}
3633
3634void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3635 LocationSummary* locations = div->GetLocations();
3636 Location first = locations->InAt(0);
3637 Location second = locations->InAt(1);
3638 DCHECK(first.Equals(locations->Out()));
3639
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003640 Primitive::Type type = div->GetResultType();
3641 switch (type) {
3642 case Primitive::kPrimInt:
3643 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003644 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003645 break;
3646 }
3647
Calin Juravle7c4954d2014-10-28 16:57:40 +00003648 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003649 if (second.IsFpuRegister()) {
3650 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3651 } else if (second.IsConstant()) {
3652 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003653 codegen_->LiteralFloatAddress(
3654 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003655 } else {
3656 DCHECK(second.IsStackSlot());
3657 __ divss(first.AsFpuRegister<XmmRegister>(),
3658 Address(CpuRegister(RSP), second.GetStackIndex()));
3659 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003660 break;
3661 }
3662
3663 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003664 if (second.IsFpuRegister()) {
3665 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3666 } else if (second.IsConstant()) {
3667 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003668 codegen_->LiteralDoubleAddress(
3669 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003670 } else {
3671 DCHECK(second.IsDoubleStackSlot());
3672 __ divsd(first.AsFpuRegister<XmmRegister>(),
3673 Address(CpuRegister(RSP), second.GetStackIndex()));
3674 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003675 break;
3676 }
3677
3678 default:
3679 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3680 }
3681}
3682
Calin Juravlebacfec32014-11-14 15:54:36 +00003683void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003684 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003685 LocationSummary* locations =
3686 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003687
3688 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003689 case Primitive::kPrimInt:
3690 case Primitive::kPrimLong: {
3691 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003692 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003693 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3694 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003695       // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3696       // that forces its results into RAX and RDX, things are simpler if we use EAX also as an
3697       // output and request another temp.
3698 if (rem->InputAt(1)->IsConstant()) {
3699 locations->AddTemp(Location::RequiresRegister());
3700 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003701 break;
3702 }
3703
3704 case Primitive::kPrimFloat:
3705 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003706 locations->SetInAt(0, Location::Any());
3707 locations->SetInAt(1, Location::Any());
3708 locations->SetOut(Location::RequiresFpuRegister());
3709 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003710 break;
3711 }
3712
3713 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003714 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003715 }
3716}
3717
3718void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3719 Primitive::Type type = rem->GetResultType();
3720 switch (type) {
3721 case Primitive::kPrimInt:
3722 case Primitive::kPrimLong: {
3723 GenerateDivRemIntegral(rem);
3724 break;
3725 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003726 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003727 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003728 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003729 break;
3730 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003731 default:
3732 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3733 }
3734}
3735
Calin Juravled0d48522014-11-04 16:40:20 +00003736void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003737 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3738 ? LocationSummary::kCallOnSlowPath
3739 : LocationSummary::kNoCall;
3740 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003741 locations->SetInAt(0, Location::Any());
3742 if (instruction->HasUses()) {
3743 locations->SetOut(Location::SameAsFirstInput());
3744 }
3745}
3746
3747void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003748 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003749 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3750 codegen_->AddSlowPath(slow_path);
3751
3752 LocationSummary* locations = instruction->GetLocations();
3753 Location value = locations->InAt(0);
3754
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003755 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003756 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003757 case Primitive::kPrimByte:
3758 case Primitive::kPrimChar:
3759 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003760 case Primitive::kPrimInt: {
3761 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003762 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003763 __ j(kEqual, slow_path->GetEntryLabel());
3764 } else if (value.IsStackSlot()) {
3765 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3766 __ j(kEqual, slow_path->GetEntryLabel());
3767 } else {
3768 DCHECK(value.IsConstant()) << value;
3769 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3770 __ jmp(slow_path->GetEntryLabel());
3771 }
3772 }
3773 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003774 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003775 case Primitive::kPrimLong: {
3776 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003777 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003778 __ j(kEqual, slow_path->GetEntryLabel());
3779 } else if (value.IsDoubleStackSlot()) {
3780 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3781 __ j(kEqual, slow_path->GetEntryLabel());
3782 } else {
3783 DCHECK(value.IsConstant()) << value;
3784 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3785 __ jmp(slow_path->GetEntryLabel());
3786 }
3787 }
3788 break;
3789 }
3790 default:
3791 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003792 }
Calin Juravled0d48522014-11-04 16:40:20 +00003793}
3794
Calin Juravle9aec02f2014-11-18 23:06:35 +00003795void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3796 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3797
3798 LocationSummary* locations =
3799 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3800
3801 switch (op->GetResultType()) {
3802 case Primitive::kPrimInt:
3803 case Primitive::kPrimLong: {
3804 locations->SetInAt(0, Location::RequiresRegister());
3805 // The shift count needs to be in CL.
3806 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3807 locations->SetOut(Location::SameAsFirstInput());
3808 break;
3809 }
3810 default:
3811 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3812 }
3813}
3814
3815void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3816 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3817
3818 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003819 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003820 Location second = locations->InAt(1);
3821
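  // Java shift semantics use only the low 5 bits (int) or 6 bits (long) of the shift count.
  // A count held in CL already behaves that way in hardware; constant counts are masked
  // explicitly with kMaxIntShiftDistance / kMaxLongShiftDistance below.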
3822 switch (op->GetResultType()) {
3823 case Primitive::kPrimInt: {
3824 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003825 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003826 if (op->IsShl()) {
3827 __ shll(first_reg, second_reg);
3828 } else if (op->IsShr()) {
3829 __ sarl(first_reg, second_reg);
3830 } else {
3831 __ shrl(first_reg, second_reg);
3832 }
3833 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003834 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003835 if (op->IsShl()) {
3836 __ shll(first_reg, imm);
3837 } else if (op->IsShr()) {
3838 __ sarl(first_reg, imm);
3839 } else {
3840 __ shrl(first_reg, imm);
3841 }
3842 }
3843 break;
3844 }
3845 case Primitive::kPrimLong: {
3846 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003847 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003848 if (op->IsShl()) {
3849 __ shlq(first_reg, second_reg);
3850 } else if (op->IsShr()) {
3851 __ sarq(first_reg, second_reg);
3852 } else {
3853 __ shrq(first_reg, second_reg);
3854 }
3855 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003856 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003857 if (op->IsShl()) {
3858 __ shlq(first_reg, imm);
3859 } else if (op->IsShr()) {
3860 __ sarq(first_reg, imm);
3861 } else {
3862 __ shrq(first_reg, imm);
3863 }
3864 }
3865 break;
3866 }
3867 default:
3868 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003869 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003870 }
3871}
3872
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003873void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3874 LocationSummary* locations =
3875 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3876
3877 switch (ror->GetResultType()) {
3878 case Primitive::kPrimInt:
3879 case Primitive::kPrimLong: {
3880 locations->SetInAt(0, Location::RequiresRegister());
3881 // The shift count needs to be in CL (unless it is a constant).
3882 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3883 locations->SetOut(Location::SameAsFirstInput());
3884 break;
3885 }
3886 default:
3887 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3888 UNREACHABLE();
3889 }
3890}
3891
3892void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3893 LocationSummary* locations = ror->GetLocations();
3894 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3895 Location second = locations->InAt(1);
3896
3897 switch (ror->GetResultType()) {
3898 case Primitive::kPrimInt:
3899 if (second.IsRegister()) {
3900 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3901 __ rorl(first_reg, second_reg);
3902 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003903 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003904 __ rorl(first_reg, imm);
3905 }
3906 break;
3907 case Primitive::kPrimLong:
3908 if (second.IsRegister()) {
3909 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3910 __ rorq(first_reg, second_reg);
3911 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003912 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003913 __ rorq(first_reg, imm);
3914 }
3915 break;
3916 default:
3917 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3918 UNREACHABLE();
3919 }
3920}
3921
Calin Juravle9aec02f2014-11-18 23:06:35 +00003922void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3923 HandleShift(shl);
3924}
3925
3926void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3927 HandleShift(shl);
3928}
3929
3930void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3931 HandleShift(shr);
3932}
3933
3934void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3935 HandleShift(shr);
3936}
3937
3938void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3939 HandleShift(ushr);
3940}
3941
3942void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3943 HandleShift(ushr);
3944}
3945
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003946void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003947 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003948 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003949 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003950 if (instruction->IsStringAlloc()) {
3951 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3952 } else {
3953 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3954 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3955 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003956 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003957}
3958
3959void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003960   // Note: if heap poisoning is enabled, the entry point takes care
3961 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003962 if (instruction->IsStringAlloc()) {
3963 // String is allocated through StringFactory. Call NewEmptyString entry point.
3964 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
Andreas Gampe542451c2016-07-26 09:02:02 -07003965 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00003966 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3967 __ call(Address(temp, code_offset.SizeValue()));
3968 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3969 } else {
3970 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3971 instruction,
3972 instruction->GetDexPc(),
3973 nullptr);
3974 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3975 DCHECK(!codegen_->IsLeafMethod());
3976 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003977}
3978
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003979void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3980 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003981 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003982 InvokeRuntimeCallingConvention calling_convention;
3983 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003984 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003985 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003986 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003987}
3988
3989void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3990 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003991 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3992 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003993   // Note: if heap poisoning is enabled, the entry point takes care
3994 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003995 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3996 instruction,
3997 instruction->GetDexPc(),
3998 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003999 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004000
4001 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004002}
4003
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004004void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004005 LocationSummary* locations =
4006 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004007 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4008 if (location.IsStackSlot()) {
4009 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4010 } else if (location.IsDoubleStackSlot()) {
4011 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4012 }
4013 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004014}
4015
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004016void InstructionCodeGeneratorX86_64::VisitParameterValue(
4017 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004018 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004019}
4020
4021void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4022 LocationSummary* locations =
4023 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4024 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4025}
4026
4027void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
4028 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4029 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004030}
4031
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004032void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4033 LocationSummary* locations =
4034 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4035 locations->SetInAt(0, Location::RequiresRegister());
4036 locations->SetOut(Location::RequiresRegister());
4037}
4038
4039void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4040 LocationSummary* locations = instruction->GetLocations();
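  // A vtable entry is loaded directly from the Class' embedded vtable; an IMT entry needs an
  // extra indirection through the Class' ImTable pointer, hence the two loads in the else
  // branch.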
Vladimir Markoa1de9182016-02-25 11:37:38 +00004041 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004042 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004043 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004044 __ movq(locations->Out().AsRegister<CpuRegister>(),
4045 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004046 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004047 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004048 instruction->GetIndex(), kX86_64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004049 __ movq(locations->Out().AsRegister<CpuRegister>(),
4050 Address(locations->InAt(0).AsRegister<CpuRegister>(),
4051 mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004052 __ movq(locations->Out().AsRegister<CpuRegister>(),
4053 Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004054 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004055}
4056
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004057void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004058 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004059 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004060 locations->SetInAt(0, Location::RequiresRegister());
4061 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004062}
4063
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004064void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4065 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004066 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4067 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004068 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004069 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004070 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004071 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004072 break;
4073
4074 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004075 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004076 break;
4077
4078 default:
4079 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4080 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004081}
4082
David Brazdil66d126e2015-04-03 16:02:44 +01004083void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4084 LocationSummary* locations =
4085 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4086 locations->SetInAt(0, Location::RequiresRegister());
4087 locations->SetOut(Location::SameAsFirstInput());
4088}
4089
4090void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004091 LocationSummary* locations = bool_not->GetLocations();
4092 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4093 locations->Out().AsRegister<CpuRegister>().AsRegister());
4094 Location out = locations->Out();
4095 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4096}
4097
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004098void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004099 LocationSummary* locations =
4100 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004101 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004102 locations->SetInAt(i, Location::Any());
4103 }
4104 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004105}
4106
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004107void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004108 LOG(FATAL) << "Unimplemented";
4109}
4110
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004111void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004112 /*
4113    * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004114 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004115 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4116 */
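  // MemoryFence() below is assumed to emit an mfence or an equivalent locked instruction; the
  // exact instruction selection is made inside MemoryFence itself, not in this function.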
4117 switch (kind) {
4118 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004119 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004120 break;
4121 }
4122 case MemBarrierKind::kAnyStore:
4123 case MemBarrierKind::kLoadAny:
4124 case MemBarrierKind::kStoreStore: {
4125 // nop
4126 break;
4127 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004128 case MemBarrierKind::kNTStoreStore:
4129 // Non-Temporal Store/Store needs an explicit fence.
4130 MemoryFence(/* non-temporal */ true);
4131 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004132 }
4133}
4134
4135void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4136 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4137
Roland Levillain0d5a2812015-11-13 10:07:31 +00004138 bool object_field_get_with_read_barrier =
4139 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004140 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004141 new (GetGraph()->GetArena()) LocationSummary(instruction,
4142 object_field_get_with_read_barrier ?
4143 LocationSummary::kCallOnSlowPath :
4144 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004145 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004146 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4147 locations->SetOut(Location::RequiresFpuRegister());
4148 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004149 // The output overlaps for an object field get when read barriers
4150 // are enabled: we do not want the move to overwrite the object's
4151 // location, as we need it to emit the read barrier.
4152 locations->SetOut(
4153 Location::RequiresRegister(),
4154 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004155 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004156 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4157 // We need a temporary register for the read barrier marking slow
4158 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4159 locations->AddTemp(Location::RequiresRegister());
4160 }
Calin Juravle52c48962014-12-16 17:02:57 +00004161}
4162
4163void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4164 const FieldInfo& field_info) {
4165 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4166
4167 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004168 Location base_loc = locations->InAt(0);
4169 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004170 Location out = locations->Out();
4171 bool is_volatile = field_info.IsVolatile();
4172 Primitive::Type field_type = field_info.GetFieldType();
4173 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4174
4175 switch (field_type) {
4176 case Primitive::kPrimBoolean: {
4177 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4178 break;
4179 }
4180
4181 case Primitive::kPrimByte: {
4182 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4183 break;
4184 }
4185
4186 case Primitive::kPrimShort: {
4187 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4188 break;
4189 }
4190
4191 case Primitive::kPrimChar: {
4192 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4193 break;
4194 }
4195
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004196 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004197 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4198 break;
4199 }
4200
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004201 case Primitive::kPrimNot: {
4202 // /* HeapReference<Object> */ out = *(base + offset)
4203 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4204 Location temp_loc = locations->GetTemp(0);
4205 // Note that a potential implicit null check is handled in this
4206         // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4207 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4208 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4209 if (is_volatile) {
4210 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4211 }
4212 } else {
4213 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4214 codegen_->MaybeRecordImplicitNullCheck(instruction);
4215 if (is_volatile) {
4216 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4217 }
4218 // If read barriers are enabled, emit read barriers other than
4219 // Baker's using a slow path (and also unpoison the loaded
4220 // reference, if heap poisoning is enabled).
4221 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4222 }
4223 break;
4224 }
4225
Calin Juravle52c48962014-12-16 17:02:57 +00004226 case Primitive::kPrimLong: {
4227 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4228 break;
4229 }
4230
4231 case Primitive::kPrimFloat: {
4232 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4233 break;
4234 }
4235
4236 case Primitive::kPrimDouble: {
4237 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4238 break;
4239 }
4240
4241 case Primitive::kPrimVoid:
4242 LOG(FATAL) << "Unreachable type " << field_type;
4243 UNREACHABLE();
4244 }
4245
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004246 if (field_type == Primitive::kPrimNot) {
4247 // Potential implicit null checks, in the case of reference
4248 // fields, are handled in the previous switch statement.
4249 } else {
4250 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004251 }
Roland Levillain4d027112015-07-01 15:41:14 +01004252
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004253 if (is_volatile) {
4254 if (field_type == Primitive::kPrimNot) {
4255 // Memory barriers, in the case of references, are also handled
4256 // in the previous switch statement.
4257 } else {
4258 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4259 }
Roland Levillain4d027112015-07-01 15:41:14 +01004260 }
Calin Juravle52c48962014-12-16 17:02:57 +00004261}
4262
4263void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4264 const FieldInfo& field_info) {
4265 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4266
4267 LocationSummary* locations =
4268 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004269 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004270 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004271 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004272 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004273
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004274 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004275 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004276 if (is_volatile) {
4277 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4278 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4279 } else {
4280 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4281 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004282 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004283 if (is_volatile) {
4284 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4285 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4286 } else {
4287 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4288 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004289 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004290 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004291 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004292 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004293 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004294 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4295 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004296 locations->AddTemp(Location::RequiresRegister());
4297 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004298}
4299
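// Code generation for an instance or static field store. A volatile store is
// bracketed by an AnyStore barrier before the store and an AnyAny barrier
// after it, which provides the ordering required for a Java volatile write.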
Calin Juravle52c48962014-12-16 17:02:57 +00004300void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004301 const FieldInfo& field_info,
4302 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004303 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4304
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004305 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004306 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4307 Location value = locations->InAt(1);
4308 bool is_volatile = field_info.IsVolatile();
4309 Primitive::Type field_type = field_info.GetFieldType();
4310 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4311
4312 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004313 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004314 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004315
Mark Mendellea5af682015-10-22 17:35:49 -04004316 bool maybe_record_implicit_null_check_done = false;
4317
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004318 switch (field_type) {
4319 case Primitive::kPrimBoolean:
4320 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004321 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004322 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004323 __ movb(Address(base, offset), Immediate(v));
4324 } else {
4325 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4326 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004327 break;
4328 }
4329
4330 case Primitive::kPrimShort:
4331 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004332 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004333 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004334 __ movw(Address(base, offset), Immediate(v));
4335 } else {
4336 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4337 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004338 break;
4339 }
4340
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004341 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004342 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004343 if (value.IsConstant()) {
4344 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004345 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4346 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4347 // Note: if heap poisoning is enabled, no need to poison
4348 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004349 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004350 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004351 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4352 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4353 __ movl(temp, value.AsRegister<CpuRegister>());
4354 __ PoisonHeapReference(temp);
4355 __ movl(Address(base, offset), temp);
4356 } else {
4357 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4358 }
Mark Mendell40741f32015-04-20 22:10:34 -04004359 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004360 break;
4361 }
4362
4363 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004364 if (value.IsConstant()) {
4365 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
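        // MoveInt64ToAddress stores the constant with a single movq when it fits
        // in a sign-extended 32-bit immediate and as two 32-bit halves otherwise
        // (hence the second, high address); it records the implicit null check
        // itself, so the one at the end of this function is skipped.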
Mark Mendellea5af682015-10-22 17:35:49 -04004366 codegen_->MoveInt64ToAddress(Address(base, offset),
4367 Address(base, offset + sizeof(int32_t)),
4368 v,
4369 instruction);
4370 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004371 } else {
4372 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4373 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004374 break;
4375 }
4376
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004377 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004378 if (value.IsConstant()) {
4379 int32_t v =
4380 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4381 __ movl(Address(base, offset), Immediate(v));
4382 } else {
4383 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4384 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004385 break;
4386 }
4387
4388 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004389 if (value.IsConstant()) {
4390 int64_t v =
4391 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4392 codegen_->MoveInt64ToAddress(Address(base, offset),
4393 Address(base, offset + sizeof(int32_t)),
4394 v,
4395 instruction);
4396 maybe_record_implicit_null_check_done = true;
4397 } else {
4398 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4399 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004400 break;
4401 }
4402
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004403 case Primitive::kPrimVoid:
4404 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004405 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004406 }
Calin Juravle52c48962014-12-16 17:02:57 +00004407
Mark Mendellea5af682015-10-22 17:35:49 -04004408 if (!maybe_record_implicit_null_check_done) {
4409 codegen_->MaybeRecordImplicitNullCheck(instruction);
4410 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004411
4412 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4413 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4414 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004415 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004416 }
4417
Calin Juravle52c48962014-12-16 17:02:57 +00004418 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004419 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004420 }
4421}
4422
4423void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4424 HandleFieldSet(instruction, instruction->GetFieldInfo());
4425}
4426
4427void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004428 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004429}
4430
4431void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004432 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004433}
4434
4435void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004436 HandleFieldGet(instruction, instruction->GetFieldInfo());
4437}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004438
Calin Juravle52c48962014-12-16 17:02:57 +00004439void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4440 HandleFieldGet(instruction);
4441}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004442
Calin Juravle52c48962014-12-16 17:02:57 +00004443void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4444 HandleFieldGet(instruction, instruction->GetFieldInfo());
4445}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004446
Calin Juravle52c48962014-12-16 17:02:57 +00004447void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4448 HandleFieldSet(instruction, instruction->GetFieldInfo());
4449}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004450
Calin Juravle52c48962014-12-16 17:02:57 +00004451void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004452 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004453}
4454
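// Unresolved field accesses cannot be compiled to direct loads and stores, so
// the visitors below arrange the inputs according to the runtime calling
// convention and generate a call into the runtime to perform the access.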
Calin Juravlee460d1d2015-09-29 04:52:17 +01004455void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4456 HUnresolvedInstanceFieldGet* instruction) {
4457 FieldAccessCallingConventionX86_64 calling_convention;
4458 codegen_->CreateUnresolvedFieldLocationSummary(
4459 instruction, instruction->GetFieldType(), calling_convention);
4460}
4461
4462void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4463 HUnresolvedInstanceFieldGet* instruction) {
4464 FieldAccessCallingConventionX86_64 calling_convention;
4465 codegen_->GenerateUnresolvedFieldAccess(instruction,
4466 instruction->GetFieldType(),
4467 instruction->GetFieldIndex(),
4468 instruction->GetDexPc(),
4469 calling_convention);
4470}
4471
4472void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4473 HUnresolvedInstanceFieldSet* instruction) {
4474 FieldAccessCallingConventionX86_64 calling_convention;
4475 codegen_->CreateUnresolvedFieldLocationSummary(
4476 instruction, instruction->GetFieldType(), calling_convention);
4477}
4478
4479void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4480 HUnresolvedInstanceFieldSet* instruction) {
4481 FieldAccessCallingConventionX86_64 calling_convention;
4482 codegen_->GenerateUnresolvedFieldAccess(instruction,
4483 instruction->GetFieldType(),
4484 instruction->GetFieldIndex(),
4485 instruction->GetDexPc(),
4486 calling_convention);
4487}
4488
4489void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4490 HUnresolvedStaticFieldGet* instruction) {
4491 FieldAccessCallingConventionX86_64 calling_convention;
4492 codegen_->CreateUnresolvedFieldLocationSummary(
4493 instruction, instruction->GetFieldType(), calling_convention);
4494}
4495
4496void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4497 HUnresolvedStaticFieldGet* instruction) {
4498 FieldAccessCallingConventionX86_64 calling_convention;
4499 codegen_->GenerateUnresolvedFieldAccess(instruction,
4500 instruction->GetFieldType(),
4501 instruction->GetFieldIndex(),
4502 instruction->GetDexPc(),
4503 calling_convention);
4504}
4505
4506void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4507 HUnresolvedStaticFieldSet* instruction) {
4508 FieldAccessCallingConventionX86_64 calling_convention;
4509 codegen_->CreateUnresolvedFieldLocationSummary(
4510 instruction, instruction->GetFieldType(), calling_convention);
4511}
4512
4513void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4514 HUnresolvedStaticFieldSet* instruction) {
4515 FieldAccessCallingConventionX86_64 calling_convention;
4516 codegen_->GenerateUnresolvedFieldAccess(instruction,
4517 instruction->GetFieldType(),
4518 instruction->GetFieldIndex(),
4519 instruction->GetDexPc(),
4520 calling_convention);
4521}
4522
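// An implicit null check dereferences the object, so its input must be in a
// register; an explicit check can also handle a stack slot or a constant.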
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004523void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004524 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4525 ? LocationSummary::kCallOnSlowPath
4526 : LocationSummary::kNoCall;
4527 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4528 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004529 ? Location::RequiresRegister()
4530 : Location::Any();
4531 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004532 if (instruction->HasUses()) {
4533 locations->SetOut(Location::SameAsFirstInput());
4534 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004535}
4536
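// An implicit null check is a plain load from offset 0 of the object: a null
// object faults, and the fault handler uses the PC info recorded below to
// throw the NullPointerException. The register operand of the testl (RAX) is
// arbitrary, as only the memory access matters.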
Calin Juravle2ae48182016-03-16 14:05:09 +00004537void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4538 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004539 return;
4540 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004541 LocationSummary* locations = instruction->GetLocations();
4542 Location obj = locations->InAt(0);
4543
4544 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004545 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004546}
4547
Calin Juravle2ae48182016-03-16 14:05:09 +00004548void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004549 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004550 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004551
4552 LocationSummary* locations = instruction->GetLocations();
4553 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004554
4555 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004556 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004557 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004558 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004559 } else {
4560 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004561 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004562 __ jmp(slow_path->GetEntryLabel());
4563 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004564 }
4565 __ j(kEqual, slow_path->GetEntryLabel());
4566}
4567
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004568void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004569 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004570}
4571
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004572void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004573 bool object_array_get_with_read_barrier =
4574 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004575 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004576 new (GetGraph()->GetArena()) LocationSummary(instruction,
4577 object_array_get_with_read_barrier ?
4578 LocationSummary::kCallOnSlowPath :
4579 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004580 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004581 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004582 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4583 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4584 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004585 // The output overlaps for an object array get when read barriers
4586 // are enabled: we do not want the move to overwrite the array's
4587 // location, as we need it to emit the read barrier.
4588 locations->SetOut(
4589 Location::RequiresRegister(),
4590 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004591 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004592 // We need a temporary register for the read barrier marking slow
4593 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4594 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4595 locations->AddTemp(Location::RequiresRegister());
4596 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004597}
4598
4599void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4600 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004601 Location obj_loc = locations->InAt(0);
4602 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004603 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004604 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004605 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004606
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004607 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004608 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004609 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004610 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004611 if (index.IsConstant()) {
4612 __ movzxb(out, Address(obj,
4613 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4614 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004615 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004616 }
4617 break;
4618 }
4619
4620 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004621 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004622 if (index.IsConstant()) {
4623 __ movsxb(out, Address(obj,
4624 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4625 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004626 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004627 }
4628 break;
4629 }
4630
4631 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004632 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004633 if (index.IsConstant()) {
4634 __ movsxw(out, Address(obj,
4635 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4636 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004637 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004638 }
4639 break;
4640 }
4641
4642 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004643 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004644 if (index.IsConstant()) {
4645 __ movzxw(out, Address(obj,
4646 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4647 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004648 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004649 }
4650 break;
4651 }
4652
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004653 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004654 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004655 if (index.IsConstant()) {
4656 __ movl(out, Address(obj,
4657 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4658 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004659 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004660 }
4661 break;
4662 }
4663
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004664 case Primitive::kPrimNot: {
4665 static_assert(
4666 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4667 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004668 // /* HeapReference<Object> */ out =
4669 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4670 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4671 Location temp = locations->GetTemp(0);
4672 // Note that a potential implicit null check is handled in this
4673        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4674 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4675 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4676 } else {
4677 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4678 if (index.IsConstant()) {
4679 uint32_t offset =
4680 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4681 __ movl(out, Address(obj, offset));
4682 codegen_->MaybeRecordImplicitNullCheck(instruction);
4683 // If read barriers are enabled, emit read barriers other than
4684 // Baker's using a slow path (and also unpoison the loaded
4685 // reference, if heap poisoning is enabled).
4686 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4687 } else {
4688 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4689 codegen_->MaybeRecordImplicitNullCheck(instruction);
4690 // If read barriers are enabled, emit read barriers other than
4691 // Baker's using a slow path (and also unpoison the loaded
4692 // reference, if heap poisoning is enabled).
4693 codegen_->MaybeGenerateReadBarrierSlow(
4694 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4695 }
4696 }
4697 break;
4698 }
4699
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004700 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004701 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004702 if (index.IsConstant()) {
4703 __ movq(out, Address(obj,
4704 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4705 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004706 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004707 }
4708 break;
4709 }
4710
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004711 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004712 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004713 if (index.IsConstant()) {
4714 __ movss(out, Address(obj,
4715 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4716 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004717 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004718 }
4719 break;
4720 }
4721
4722 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004723 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004724 if (index.IsConstant()) {
4725 __ movsd(out, Address(obj,
4726 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4727 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004728 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004729 }
4730 break;
4731 }
4732
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004733 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004734 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004735 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004736 }
Roland Levillain4d027112015-07-01 15:41:14 +01004737
4738 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004739 // Potential implicit null checks, in the case of reference
4740 // arrays, are handled in the previous switch statement.
4741 } else {
4742 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004743 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004744}
4745
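// An object-array store may need a runtime type check. The failure path (and,
// when read barriers are enabled, the type check itself) is handled by
// ArraySetSlowPathX86_64.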
4746void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004747 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004748
4749 bool needs_write_barrier =
4750 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004751 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004752 bool object_array_set_with_read_barrier =
4753 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004754
Nicolas Geoffray39468442014-09-02 15:17:15 +01004755 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004756 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004757 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004758 LocationSummary::kCallOnSlowPath :
4759 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004760
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004761 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004762 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4763 if (Primitive::IsFloatingPointType(value_type)) {
4764 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004765 } else {
4766 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4767 }
4768
4769 if (needs_write_barrier) {
4770 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004771
4772 // This first temporary register is possibly used for heap
4773 // reference poisoning and/or read barrier emission too.
4774 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004775 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004776 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004777}
4778
4779void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4780 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004781 Location array_loc = locations->InAt(0);
4782 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004783 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004784 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004785 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004786 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004787 bool needs_write_barrier =
4788 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004789 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4790 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4791 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004792
4793 switch (value_type) {
4794 case Primitive::kPrimBoolean:
4795 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004796 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4797 Address address = index.IsConstant()
4798 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4799 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4800 if (value.IsRegister()) {
4801 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004802 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004803 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004804 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004805 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004806 break;
4807 }
4808
4809 case Primitive::kPrimShort:
4810 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004811 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4812 Address address = index.IsConstant()
4813 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4814 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4815 if (value.IsRegister()) {
4816 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004817 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004818 DCHECK(value.IsConstant()) << value;
4819 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004820 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004821 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004822 break;
4823 }
4824
4825 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004826 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4827 Address address = index.IsConstant()
4828 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4829 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004830
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004831 if (!value.IsRegister()) {
4832 // Just setting null.
4833 DCHECK(instruction->InputAt(2)->IsNullConstant());
4834 DCHECK(value.IsConstant()) << value;
4835 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004836 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004837 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004838 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004839 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004840 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004841
4842 DCHECK(needs_write_barrier);
4843 CpuRegister register_value = value.AsRegister<CpuRegister>();
4844 NearLabel done, not_null, do_put;
4845 SlowPathCode* slow_path = nullptr;
4846 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004847 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004848 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4849 codegen_->AddSlowPath(slow_path);
4850 if (instruction->GetValueCanBeNull()) {
4851 __ testl(register_value, register_value);
4852 __ j(kNotEqual, &not_null);
4853 __ movl(address, Immediate(0));
4854 codegen_->MaybeRecordImplicitNullCheck(instruction);
4855 __ jmp(&done);
4856 __ Bind(&not_null);
4857 }
4858
Roland Levillain0d5a2812015-11-13 10:07:31 +00004859 if (kEmitCompilerReadBarrier) {
4860 // When read barriers are enabled, the type checking
4861 // instrumentation requires two read barriers:
4862 //
4863 // __ movl(temp2, temp);
4864 // // /* HeapReference<Class> */ temp = temp->component_type_
4865 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004866 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004867 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4868 //
4869 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4870 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004871 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004872 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4873 //
4874 // __ cmpl(temp, temp2);
4875 //
4876 // However, the second read barrier may trash `temp`, as it
4877 // is a temporary register, and as such would not be saved
4878 // along with live registers before calling the runtime (nor
4879 // restored afterwards). So in this case, we bail out and
4880 // delegate the work to the array set slow path.
4881 //
4882 // TODO: Extend the register allocator to support a new
4883 // "(locally) live temp" location so as to avoid always
4884 // going into the slow path when read barriers are enabled.
4885 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004886 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004887 // /* HeapReference<Class> */ temp = array->klass_
4888 __ movl(temp, Address(array, class_offset));
4889 codegen_->MaybeRecordImplicitNullCheck(instruction);
4890 __ MaybeUnpoisonHeapReference(temp);
4891
4892 // /* HeapReference<Class> */ temp = temp->component_type_
4893 __ movl(temp, Address(temp, component_offset));
4894 // If heap poisoning is enabled, no need to unpoison `temp`
4895 // nor the object reference in `register_value->klass`, as
4896 // we are comparing two poisoned references.
4897 __ cmpl(temp, Address(register_value, class_offset));
4898
4899 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4900 __ j(kEqual, &do_put);
4901 // If heap poisoning is enabled, the `temp` reference has
4902 // not been unpoisoned yet; unpoison it now.
4903 __ MaybeUnpoisonHeapReference(temp);
4904
4905 // /* HeapReference<Class> */ temp = temp->super_class_
4906 __ movl(temp, Address(temp, super_offset));
4907 // If heap poisoning is enabled, no need to unpoison
4908 // `temp`, as we are comparing against null below.
4909 __ testl(temp, temp);
4910 __ j(kNotEqual, slow_path->GetEntryLabel());
4911 __ Bind(&do_put);
4912 } else {
4913 __ j(kNotEqual, slow_path->GetEntryLabel());
4914 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004915 }
4916 }
4917
4918 if (kPoisonHeapReferences) {
4919 __ movl(temp, register_value);
4920 __ PoisonHeapReference(temp);
4921 __ movl(address, temp);
4922 } else {
4923 __ movl(address, register_value);
4924 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004925 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004926 codegen_->MaybeRecordImplicitNullCheck(instruction);
4927 }
4928
4929 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4930 codegen_->MarkGCCard(
4931 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4932 __ Bind(&done);
4933
4934 if (slow_path != nullptr) {
4935 __ Bind(slow_path->GetExitLabel());
4936 }
4937
4938 break;
4939 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004940
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004941 case Primitive::kPrimInt: {
4942 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4943 Address address = index.IsConstant()
4944 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4945 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4946 if (value.IsRegister()) {
4947 __ movl(address, value.AsRegister<CpuRegister>());
4948 } else {
4949 DCHECK(value.IsConstant()) << value;
4950 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4951 __ movl(address, Immediate(v));
4952 }
4953 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004954 break;
4955 }
4956
4957 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004958 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4959 Address address = index.IsConstant()
4960 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4961 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4962 if (value.IsRegister()) {
4963 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004964 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004965 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004966 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004967 Address address_high = index.IsConstant()
4968 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4969 offset + sizeof(int32_t))
4970 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4971 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004972 }
4973 break;
4974 }
4975
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004976 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004977 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4978 Address address = index.IsConstant()
4979 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4980 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004981 if (value.IsFpuRegister()) {
4982 __ movss(address, value.AsFpuRegister<XmmRegister>());
4983 } else {
4984 DCHECK(value.IsConstant());
4985 int32_t v =
4986 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4987 __ movl(address, Immediate(v));
4988 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004989 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004990 break;
4991 }
4992
4993 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004994 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4995 Address address = index.IsConstant()
4996 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4997 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004998 if (value.IsFpuRegister()) {
4999 __ movsd(address, value.AsFpuRegister<XmmRegister>());
5000 codegen_->MaybeRecordImplicitNullCheck(instruction);
5001 } else {
5002 int64_t v =
5003 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
5004 Address address_high = index.IsConstant()
5005 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
5006 offset + sizeof(int32_t))
5007 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
5008 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
5009 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005010 break;
5011 }
5012
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005013 case Primitive::kPrimVoid:
5014 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07005015 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005016 }
5017}
5018
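// An HArrayLength marked as emitted at its use site generates no code here;
// VisitBoundsCheck below instead compares the index directly against the
// length field in memory.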
5019void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005020 LocationSummary* locations =
5021 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005022 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005023 if (!instruction->IsEmittedAtUseSite()) {
5024 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5025 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005026}
5027
5028void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005029 if (instruction->IsEmittedAtUseSite()) {
5030 return;
5031 }
5032
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005033 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005034 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005035 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5036 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005037 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005038 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005039}
5040
5041void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00005042 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
5043 ? LocationSummary::kCallOnSlowPath
5044 : LocationSummary::kNoCall;
5045 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005046 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005047 HInstruction* length = instruction->InputAt(1);
5048 if (!length->IsEmittedAtUseSite()) {
5049 locations->SetInAt(1, Location::RegisterOrConstant(length));
5050 }
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005051 if (instruction->HasUses()) {
5052 locations->SetOut(Location::SameAsFirstInput());
5053 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005054}
5055
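// Bounds-check code generation distinguishes three cases: a constant length
// (possibly with a constant index as well), an array length emitted at its use
// site (compared against the length field in memory), and a length held in a
// register. An out-of-range index jumps to BoundsCheckSlowPathX86_64.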
5056void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5057 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005058 Location index_loc = locations->InAt(0);
5059 Location length_loc = locations->InAt(1);
Mark Mendellee8d9712016-07-12 11:13:15 -04005060 SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005061
Mark Mendell99dbd682015-04-22 16:18:52 -04005062 if (length_loc.IsConstant()) {
5063 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5064 if (index_loc.IsConstant()) {
5065      // BCE will remove the bounds check if we are guaranteed to pass.
5066 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5067 if (index < 0 || index >= length) {
5068 codegen_->AddSlowPath(slow_path);
5069 __ jmp(slow_path->GetEntryLabel());
5070 } else {
5071        // Some optimization after BCE may have generated this check with constant
5072        // operands; when the index is known to be in range there is nothing to emit.
5073 }
5074 return;
5075 }
5076
5077 // We have to reverse the jump condition because the length is the constant.
5078 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5079 __ cmpl(index_reg, Immediate(length));
5080 codegen_->AddSlowPath(slow_path);
5081 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005082 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005083 HInstruction* array_length = instruction->InputAt(1);
5084 if (array_length->IsEmittedAtUseSite()) {
5085 // Address the length field in the array.
5086 DCHECK(array_length->IsArrayLength());
5087 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
5088 Location array_loc = array_length->GetLocations()->InAt(0);
5089 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
5090 if (index_loc.IsConstant()) {
5091 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5092 __ cmpl(array_len, Immediate(value));
5093 } else {
5094 __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
5095 }
5096 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendell99dbd682015-04-22 16:18:52 -04005097 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005098 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5099 if (index_loc.IsConstant()) {
5100 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5101 __ cmpl(length, Immediate(value));
5102 } else {
5103 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5104 }
Mark Mendell99dbd682015-04-22 16:18:52 -04005105 }
5106 codegen_->AddSlowPath(slow_path);
5107 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005108 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005109}
5110
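// Marks the card covering `object` as dirty: the card table base is loaded
// from the Thread via GS, the object address is shifted right by kCardShift to
// index the table, and the low byte of the base register is stored at that
// entry (the base is biased so that its least significant byte equals the
// dirty-card value). If the stored value can be null, null values skip the
// marking.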
5111void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5112 CpuRegister card,
5113 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005114 CpuRegister value,
5115 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005116 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005117 if (value_can_be_null) {
5118 __ testl(value, value);
5119 __ j(kEqual, &is_null);
5120 }
Andreas Gampe542451c2016-07-26 09:02:02 -07005121 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005122 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005123 __ movq(temp, object);
5124 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005125 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005126 if (value_can_be_null) {
5127 __ Bind(&is_null);
5128 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005129}
5130
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005131void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005132 LOG(FATAL) << "Unimplemented";
5133}
5134
5135void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005136 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5137}
5138
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005139void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5140 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5141}
5142
5143void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005144 HBasicBlock* block = instruction->GetBlock();
5145 if (block->GetLoopInformation() != nullptr) {
5146 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5147 // The back edge will generate the suspend check.
5148 return;
5149 }
5150 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5151 // The goto will generate the suspend check.
5152 return;
5153 }
5154 GenerateSuspendCheck(instruction, nullptr);
5155}
5156
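// Emits the actual suspend check: the thread's flags are tested through a
// GS-relative access, and a non-zero value (a pending suspend or checkpoint
// request) transfers control to SuspendCheckSlowPathX86_64. With a known
// successor the code jumps to it when no flag is set and otherwise falls into
// the slow path; without one, a set flag enters the slow path and execution
// continues at its return label.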
5157void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5158 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005159 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005160 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5161 if (slow_path == nullptr) {
5162 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5163 instruction->SetSlowPath(slow_path);
5164 codegen_->AddSlowPath(slow_path);
5165 if (successor != nullptr) {
5166 DCHECK(successor->IsLoopHeader());
5167 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5168 }
5169 } else {
5170 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5171 }
5172
Andreas Gampe542451c2016-07-26 09:02:02 -07005173 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005174 /* no_rip */ true),
5175 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005176 if (successor == nullptr) {
5177 __ j(kNotEqual, slow_path->GetEntryLabel());
5178 __ Bind(slow_path->GetReturnLabel());
5179 } else {
5180 __ j(kEqual, codegen_->GetLabelOf(successor));
5181 __ jmp(slow_path->GetEntryLabel());
5182 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005183}
5184
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005185X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5186 return codegen_->GetAssembler();
5187}
5188
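// Materializes one pending parallel move: register, stack-slot, or constant
// sources into register or stack destinations. Stack-to-stack copies go
// through the reserved TMP register, a zero integer constant is set with xorl
// for a shorter encoding, and 64-bit constants use the Load64BitValue /
// Store64BitValueToStack helpers.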
5189void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005190 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005191 Location source = move->GetSource();
5192 Location destination = move->GetDestination();
5193
5194 if (source.IsRegister()) {
5195 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005196 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005197 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005198 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005199 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005200 } else {
5201 DCHECK(destination.IsDoubleStackSlot());
5202 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005203 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005204 }
5205 } else if (source.IsStackSlot()) {
5206 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005207 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005208 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005209 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005210 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005211 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005212 } else {
5213 DCHECK(destination.IsStackSlot());
5214 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5215 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5216 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005217 } else if (source.IsDoubleStackSlot()) {
5218 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005219 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005220 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005221 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005222 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5223 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005224 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005225 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005226 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5227 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5228 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005229 } else if (source.IsConstant()) {
5230 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005231 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5232 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005233 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005234 if (value == 0) {
5235 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5236 } else {
5237 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5238 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005239 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005240 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005241 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005242 }
5243 } else if (constant->IsLongConstant()) {
5244 int64_t value = constant->AsLongConstant()->GetValue();
5245 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005246 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005247 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005248 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005249 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005250 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005251 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005252 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005253 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005254 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005255 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005256 } else {
5257 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005258 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005259 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5260 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005261 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005262 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005263 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005264 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005265 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005266 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005267 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005268 } else {
5269 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005270 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005271 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005272 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005273 } else if (source.IsFpuRegister()) {
5274 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005275 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005276 } else if (destination.IsStackSlot()) {
5277 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005278 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005279 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005280 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005281 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005282 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005283 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005284 }
5285}
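
// Illustrative sketch, not part of the original code generator: the constant
// branch of EmitMove above materializes zero with a register-clearing xorl,
// other 32-bit values with movl, 64-bit values through Load64BitValue, and
// floating-point values by storing their raw IEEE-754 bits as an integer
// immediate. The helper below merely restates that bit-reinterpretation step
// using the same bit_cast utility already used above; the name is hypothetical.
static ATTRIBUTE_UNUSED int32_t Float32BitsSketch(float value) {
  // Same reinterpretation as the IsFloatConstant() path above; the resulting
  // integer can then be written to a stack slot with a plain movl.
  return bit_cast<int32_t, float>(value);
}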
5286
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005287void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005288 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005289 __ movl(Address(CpuRegister(RSP), mem), reg);
5290 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005291}
5292
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005293void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005294 ScratchRegisterScope ensure_scratch(
5295 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5296
5297 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5298 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5299 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5300 Address(CpuRegister(RSP), mem2 + stack_offset));
5301 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5302 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5303 CpuRegister(ensure_scratch.GetRegister()));
5304}
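
// Illustrative sketch, not part of the original code generator: a
// memory-to-memory swap needs two temporaries, TMP plus one register obtained
// from ScratchRegisterScope. When that scratch register had to be spilled with
// a push, the stack pointer moved down by one word, so both slot offsets are
// rebased by `stack_offset` exactly as above. A plain C++ analogue of the data
// movement (names hypothetical):
static ATTRIBUTE_UNUSED void SwapStackWordsSketch(int32_t* stack_base,
                                                  size_t slot1,
                                                  size_t slot2) {
  int32_t tmp1 = stack_base[slot1];  // movl TMP, [RSP + mem1 + stack_offset]
  int32_t tmp2 = stack_base[slot2];  // movl scratch, [RSP + mem2 + stack_offset]
  stack_base[slot2] = tmp1;          // movl [RSP + mem2 + stack_offset], TMP
  stack_base[slot1] = tmp2;          // movl [RSP + mem1 + stack_offset], scratch
}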
5305
Mark Mendell8a1c7282015-06-29 15:41:28 -04005306void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5307 __ movq(CpuRegister(TMP), reg1);
5308 __ movq(reg1, reg2);
5309 __ movq(reg2, CpuRegister(TMP));
5310}
5311
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005312void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5313 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5314 __ movq(Address(CpuRegister(RSP), mem), reg);
5315 __ movq(reg, CpuRegister(TMP));
5316}
5317
5318void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5319 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005320 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005321
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005322 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5323 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5324 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5325 Address(CpuRegister(RSP), mem2 + stack_offset));
5326 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5327 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5328 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005329}
5330
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005331void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5332 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5333 __ movss(Address(CpuRegister(RSP), mem), reg);
5334 __ movd(reg, CpuRegister(TMP));
5335}
5336
5337void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5338 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5339 __ movsd(Address(CpuRegister(RSP), mem), reg);
5340 __ movd(reg, CpuRegister(TMP));
5341}
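
// Illustrative sketch, not part of the original code generator: the two XMM
// exchange helpers above park the memory word in the general-purpose TMP
// register, store the XMM value to memory with movss/movsd, and then transfer
// the parked bits back into the XMM register, so no second XMM register is
// needed. The swap is safe because routing floating-point bits through an
// integer temporary is a pure bit copy, as restated below (name hypothetical):
static ATTRIBUTE_UNUSED float RoundTripThroughGprSketch(float value) {
  // Bits moved through a general-purpose register come back unchanged, which
  // is what makes TMP a valid intermediary for the swap.
  int32_t parked = bit_cast<int32_t, float>(value);
  return bit_cast<float, int32_t>(parked);
}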
5342
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005343void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005344 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005345 Location source = move->GetSource();
5346 Location destination = move->GetDestination();
5347
5348 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005349 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005350 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005351 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005352 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005353 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005354 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005355 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5356 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005357 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005358 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005359 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005360 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5361 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005362 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005363 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5364 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5365 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005366 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005367 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005368 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005369 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005370 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005371 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005372 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005373 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005374 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005375 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005376 }
5377}
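
// Illustrative sketch, not part of the original code generator: EmitSwap above
// dispatches purely on the (source, destination) location kinds and defers to
// a width-correct Exchange helper; combinations it does not handle abort with
// LOG(FATAL). The FpuRegister/FpuRegister case swaps two XMM registers with a
// single general-purpose temporary (movd to TMP, movaps, movd back) rather
// than a third XMM register. A plain C++ analogue of that three-step rotation,
// with a hypothetical name:
static ATTRIBUTE_UNUSED void SwapViaTempSketch(int64_t* source, int64_t* destination) {
  int64_t tmp = *source;   // movd TMP, source_xmm
  *source = *destination;  // movaps source_xmm, destination_xmm
  *destination = tmp;      // movd destination_xmm, TMP
}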
5378
5379
5380void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5381 __ pushq(CpuRegister(reg));
5382}
5383
5384
5385void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5386 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005387}
5388
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005389void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005390 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005391 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5392 Immediate(mirror::Class::kStatusInitialized));
5393 __ j(kLess, slow_path->GetEntryLabel());
5394 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005395 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005396}
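
// Illustrative sketch, not part of the original code generator: the check
// above compares the class status word against kStatusInitialized and enters
// the slow path whenever the status is smaller, i.e. the class is not yet
// fully initialized; on x86-64 no trailing fence is required. A C++
// restatement of the branch condition (names hypothetical):
static ATTRIBUTE_UNUSED bool NeedsClinitSlowPathSketch(int32_t class_status,
                                                       int32_t status_initialized) {
  // cmpl [class_reg + StatusOffset], Immediate(kStatusInitialized)
  // j(kLess, slow_path->GetEntryLabel())
  return class_status < status_initialized;
}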
5397
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005398HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5399 HLoadClass::LoadKind desired_class_load_kind) {
5400 if (kEmitCompilerReadBarrier) {
5401 switch (desired_class_load_kind) {
5402 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5403 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5404 case HLoadClass::LoadKind::kBootImageAddress:
5405 // TODO: Implement for read barrier.
5406 return HLoadClass::LoadKind::kDexCacheViaMethod;
5407 default:
5408 break;
5409 }
5410 }
5411 switch (desired_class_load_kind) {
5412 case HLoadClass::LoadKind::kReferrersClass:
5413 break;
5414 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5415 DCHECK(!GetCompilerOptions().GetCompilePic());
5416 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5417 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5418 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5419 DCHECK(GetCompilerOptions().GetCompilePic());
5420 break;
5421 case HLoadClass::LoadKind::kBootImageAddress:
5422 break;
5423 case HLoadClass::LoadKind::kDexCacheAddress:
5424 DCHECK(Runtime::Current()->UseJitCompilation());
5425 break;
5426 case HLoadClass::LoadKind::kDexCachePcRelative:
5427 DCHECK(!Runtime::Current()->UseJitCompilation());
5428 break;
5429 case HLoadClass::LoadKind::kDexCacheViaMethod:
5430 break;
5431 }
5432 return desired_class_load_kind;
5433}
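
// Illustrative sketch, not part of the original code generator: the method
// above never invents a new load strategy, it only downgrades requests the
// x86-64 back end cannot honor. Under read barriers the boot-image kinds fall
// back to the dex-cache-via-method path, and a link-time absolute address is
// replaced by the always-available RIP-relative form. GetSupportedLoadStringKind
// further below applies the same policy to strings. A condensed restatement
// with hypothetical names:
enum class LoadKindSketch { kLinkTimeAddress, kLinkTimePcRelative, kViaMethod };

static ATTRIBUTE_UNUSED LoadKindSketch AdjustLoadKindSketch(LoadKindSketch desired,
                                                            bool emit_read_barrier) {
  if (emit_read_barrier) {
    // Not yet implemented for read barriers: use the always-correct fallback.
    return LoadKindSketch::kViaMethod;
  }
  if (desired == LoadKindSketch::kLinkTimeAddress) {
    // Prefer the RIP-relative form on x86-64.
    return LoadKindSketch::kLinkTimePcRelative;
  }
  return desired;
}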
5434
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005435void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005436 if (cls->NeedsAccessCheck()) {
5437 InvokeRuntimeCallingConvention calling_convention;
5438 CodeGenerator::CreateLoadClassLocationSummary(
5439 cls,
5440 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5441 Location::RegisterLocation(RAX),
5442 /* code_generator_supports_read_barrier */ true);
5443 return;
5444 }
5445
5446 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
5447 ? LocationSummary::kCallOnSlowPath
5448 : LocationSummary::kNoCall;
5449 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
5450 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5451 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5452 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5453 locations->SetInAt(0, Location::RequiresRegister());
5454 }
5455 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005456}
5457
5458void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005459 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005460 if (cls->NeedsAccessCheck()) {
5461 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5462 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5463 cls,
5464 cls->GetDexPc(),
5465 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005466 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005467 return;
5468 }
5469
Roland Levillain0d5a2812015-11-13 10:07:31 +00005470 Location out_loc = locations->Out();
5471 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005472
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005473 bool generate_null_check = false;
5474 switch (cls->GetLoadKind()) {
5475 case HLoadClass::LoadKind::kReferrersClass: {
5476 DCHECK(!cls->CanCallRuntime());
5477 DCHECK(!cls->MustGenerateClinitCheck());
5478 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5479 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5480 GenerateGcRootFieldLoad(
5481 cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5482 break;
5483 }
5484 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5485 DCHECK(!kEmitCompilerReadBarrier);
5486 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5487 codegen_->RecordTypePatch(cls);
5488 break;
5489 case HLoadClass::LoadKind::kBootImageAddress: {
5490 DCHECK(!kEmitCompilerReadBarrier);
5491 DCHECK_NE(cls->GetAddress(), 0u);
5492 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
5493 __ movl(out, Immediate(address)); // Zero-extended.
5494 codegen_->RecordSimplePatch();
5495 break;
5496 }
5497 case HLoadClass::LoadKind::kDexCacheAddress: {
5498 DCHECK_NE(cls->GetAddress(), 0u);
5499 // /* GcRoot<mirror::Class> */ out = *address
5500 if (IsUint<32>(cls->GetAddress())) {
5501 Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
5502 GenerateGcRootFieldLoad(cls, out_loc, address);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005503 } else {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005504 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5505 __ movq(out, Immediate(cls->GetAddress()));
5506 GenerateGcRootFieldLoad(cls, out_loc, Address(out, 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005507 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005508 generate_null_check = !cls->IsInDexCache();
5509 break;
5510 }
5511 case HLoadClass::LoadKind::kDexCachePcRelative: {
5512 uint32_t offset = cls->GetDexCacheElementOffset();
5513 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
5514 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5515 /* no_rip */ false);
5516 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
5517 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label);
5518 generate_null_check = !cls->IsInDexCache();
5519 break;
5520 }
5521 case HLoadClass::LoadKind::kDexCacheViaMethod: {
5522 // /* GcRoot<mirror::Class>[] */ out =
5523 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5524 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5525 __ movq(out,
5526 Address(current_method,
5527 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
5528 // /* GcRoot<mirror::Class> */ out = out[type_index]
5529 GenerateGcRootFieldLoad(
5530 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
5531 generate_null_check = !cls->IsInDexCache();
5532 break;
5533 }
5534 default:
5535 LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
5536 UNREACHABLE();
5537 }
5538
5539 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5540 DCHECK(cls->CanCallRuntime());
5541 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5542 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5543 codegen_->AddSlowPath(slow_path);
5544 if (generate_null_check) {
5545 __ testl(out, out);
5546 __ j(kEqual, slow_path->GetEntryLabel());
5547 }
5548 if (cls->MustGenerateClinitCheck()) {
5549 GenerateClassInitializationCheck(slow_path, out);
5550 } else {
5551 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005552 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005553 }
5554}
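
// Illustrative sketch, not part of the original code generator: ignoring read
// barriers, the kDexCacheViaMethod case above is two dependent loads followed
// by the shared null-check/clinit tail; the other load kinds differ only in
// how the GC root holding the class is addressed (RIP-relative patch, absolute
// address, or PC-relative dex cache element). The types and function below are
// hypothetical stand-ins that restate that data flow:
struct ClassSketch;
struct MethodSketch {
  ClassSketch** dex_cache_resolved_types;  // GcRoot<mirror::Class>[] in ART.
};

static ATTRIBUTE_UNUSED ClassSketch* LoadClassViaMethodSketch(MethodSketch* current_method,
                                                              size_t type_index) {
  // movq out, [current_method + DexCacheResolvedTypesOffset]
  ClassSketch** cache = current_method->dex_cache_resolved_types;
  // GenerateGcRootFieldLoad: out = out[type_index]
  ClassSketch* klass = cache[type_index];
  // testl out, out; j(kEqual, slow_path) -- LoadClassSlowPathX86_64 resolves
  // the class and, when required, runs its class initializer.
  return klass;
}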
5555
5556void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5557 LocationSummary* locations =
5558 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5559 locations->SetInAt(0, Location::RequiresRegister());
5560 if (check->HasUses()) {
5561 locations->SetOut(Location::SameAsFirstInput());
5562 }
5563}
5564
5565void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005566 // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005567 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005568 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005569 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005570 GenerateClassInitializationCheck(slow_path,
5571 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005572}
5573
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005574HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5575 HLoadString::LoadKind desired_string_load_kind) {
5576 if (kEmitCompilerReadBarrier) {
5577 switch (desired_string_load_kind) {
5578 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5579 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5580 case HLoadString::LoadKind::kBootImageAddress:
5581 // TODO: Implement for read barrier.
5582 return HLoadString::LoadKind::kDexCacheViaMethod;
5583 default:
5584 break;
5585 }
5586 }
5587 switch (desired_string_load_kind) {
5588 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5589 DCHECK(!GetCompilerOptions().GetCompilePic());
5590 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5591 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5592 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5593 DCHECK(GetCompilerOptions().GetCompilePic());
5594 break;
5595 case HLoadString::LoadKind::kBootImageAddress:
5596 break;
5597 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005598 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005599 break;
5600 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005601 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005602 break;
5603 case HLoadString::LoadKind::kDexCacheViaMethod:
5604 break;
5605 }
5606 return desired_string_load_kind;
5607}
5608
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005609void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005610 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005611 ? LocationSummary::kCallOnSlowPath
5612 : LocationSummary::kNoCall;
5613 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005614 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5615 locations->SetInAt(0, Location::RequiresRegister());
5616 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005617 locations->SetOut(Location::RequiresRegister());
5618}
5619
5620void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005621 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005622 Location out_loc = locations->Out();
5623 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005624
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005625 switch (load->GetLoadKind()) {
5626 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
5627 DCHECK(!kEmitCompilerReadBarrier);
5628 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5629 codegen_->RecordStringPatch(load);
5630 return; // No dex cache slow path.
5631 }
5632 case HLoadString::LoadKind::kBootImageAddress: {
5633 DCHECK(!kEmitCompilerReadBarrier);
5634 DCHECK_NE(load->GetAddress(), 0u);
5635 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5636 __ movl(out, Immediate(address)); // Zero-extended.
5637 codegen_->RecordSimplePatch();
5638 return; // No dex cache slow path.
5639 }
5640 case HLoadString::LoadKind::kDexCacheAddress: {
5641 DCHECK_NE(load->GetAddress(), 0u);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005642 // /* GcRoot<mirror::String> */ out = *address
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005643 if (IsUint<32>(load->GetAddress())) {
5644 Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
5645 GenerateGcRootFieldLoad(load, out_loc, address);
5646 } else {
5647 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5648 __ movq(out, Immediate(load->GetAddress()));
5649 GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
5650 }
5651 break;
5652 }
5653 case HLoadString::LoadKind::kDexCachePcRelative: {
5654 uint32_t offset = load->GetDexCacheElementOffset();
5655 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
5656 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5657 /* no_rip */ false);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005658 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005659 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
5660 break;
5661 }
5662 case HLoadString::LoadKind::kDexCacheViaMethod: {
5663 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5664
5665 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5666 GenerateGcRootFieldLoad(
5667 load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5668 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5669 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
5670 // /* GcRoot<mirror::String> */ out = out[string_index]
5671 GenerateGcRootFieldLoad(
5672 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
5673 break;
5674 }
5675 default:
5676 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
5677 UNREACHABLE();
5678 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005679
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005680 if (!load->IsInDexCache()) {
5681 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5682 codegen_->AddSlowPath(slow_path);
5683 __ testl(out, out);
5684 __ j(kEqual, slow_path->GetEntryLabel());
5685 __ Bind(slow_path->GetExitLabel());
5686 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005687}
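
// Illustrative sketch, not part of the original code generator: only the
// dex-cache load kinds above fall through to the testl/slow-path tail, and
// even then only when the string is not already known to be in the dex cache;
// the boot-image kinds return early ("No dex cache slow path") because the
// string is known to be resolved. A condensed restatement of that tail, with
// hypothetical names:
static ATTRIBUTE_UNUSED const void* ResolveStringTailSketch(const void* loaded_string,
                                                            const void* (*slow_path)()) {
  // testl out, out; j(kEqual, slow_path->GetEntryLabel())
  return loaded_string != nullptr ? loaded_string : slow_path();
}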
5688
David Brazdilcb1c0552015-08-04 16:22:25 +01005689static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005690 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005691 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005692}
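
// Illustrative sketch, not part of the original code generator: the pending
// exception is a field of the runtime Thread object, which x86-64 reaches
// through the gs segment register, so VisitLoadException and
// VisitClearException below each compile to a single gs-prefixed movl at a
// fixed offset. A conceptual C++ analogue using a hypothetical thread-local
// slot (names are illustrative only):
static thread_local void* tls_pending_exception_sketch = nullptr;

static ATTRIBUTE_UNUSED void* LoadExceptionSketch() {
  return tls_pending_exception_sketch;     // __ gs()->movl(out, GetExceptionTlsAddress())
}

static ATTRIBUTE_UNUSED void ClearExceptionSketch() {
  tls_pending_exception_sketch = nullptr;  // __ gs()->movl(GetExceptionTlsAddress(), Immediate(0))
}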
5693
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005694void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5695 LocationSummary* locations =
5696 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5697 locations->SetOut(Location::RequiresRegister());
5698}
5699
5700void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005701 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5702}
5703
5704void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5705 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5706}
5707
5708void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5709 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005710}
5711
5712void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5713 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005714 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005715 InvokeRuntimeCallingConvention calling_convention;
5716 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5717}
5718
5719void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005720 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5721 instruction,
5722 instruction->GetDexPc(),
5723 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005724 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005725}
5726
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005727static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5728 return kEmitCompilerReadBarrier &&
5729 (kUseBakerReadBarrier ||
5730 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5731 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5732 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5733}
5734
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005735void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005736 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005737 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5738 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005739 case TypeCheckKind::kExactCheck:
5740 case TypeCheckKind::kAbstractClassCheck:
5741 case TypeCheckKind::kClassHierarchyCheck:
5742 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005743 call_kind =
5744 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005745 break;
5746 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005747 case TypeCheckKind::kUnresolvedCheck:
5748 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005749 call_kind = LocationSummary::kCallOnSlowPath;
5750 break;
5751 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005752
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005753 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005754 locations->SetInAt(0, Location::RequiresRegister());
5755 locations->SetInAt(1, Location::Any());
5756 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5757 locations->SetOut(Location::RequiresRegister());
5758 // When read barriers are enabled, we need a temporary register for
5759 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005760 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005761 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005762 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005763}
5764
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005765void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005766 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005767 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005768 Location obj_loc = locations->InAt(0);
5769 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005770 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005771 Location out_loc = locations->Out();
5772 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005773 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005774 locations->GetTemp(0) :
5775 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005776 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005777 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5778 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5779 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005780 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005781 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005782
5783 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005784 // Avoid null check if we know obj is not null.
5785 if (instruction->MustDoNullCheck()) {
5786 __ testl(obj, obj);
5787 __ j(kEqual, &zero);
5788 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005789
Roland Levillain0d5a2812015-11-13 10:07:31 +00005790 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005791 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005792
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005793 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005794 case TypeCheckKind::kExactCheck: {
5795 if (cls.IsRegister()) {
5796 __ cmpl(out, cls.AsRegister<CpuRegister>());
5797 } else {
5798 DCHECK(cls.IsStackSlot()) << cls;
5799 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5800 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005801 if (zero.IsLinked()) {
5802 // Classes must be equal for the instanceof to succeed.
5803 __ j(kNotEqual, &zero);
5804 __ movl(out, Immediate(1));
5805 __ jmp(&done);
5806 } else {
5807 __ setcc(kEqual, out);
5808 // setcc only sets the low byte.
5809 __ andl(out, Immediate(1));
5810 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005811 break;
5812 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005813
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005814 case TypeCheckKind::kAbstractClassCheck: {
5815 // If the class is abstract, we eagerly fetch the super class of the
5816 // object to avoid doing a comparison we know will fail.
5817 NearLabel loop, success;
5818 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005819 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005820 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005821 __ testl(out, out);
5822 // If `out` is null, we use it for the result, and jump to `done`.
5823 __ j(kEqual, &done);
5824 if (cls.IsRegister()) {
5825 __ cmpl(out, cls.AsRegister<CpuRegister>());
5826 } else {
5827 DCHECK(cls.IsStackSlot()) << cls;
5828 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5829 }
5830 __ j(kNotEqual, &loop);
5831 __ movl(out, Immediate(1));
5832 if (zero.IsLinked()) {
5833 __ jmp(&done);
5834 }
5835 break;
5836 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005837
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005838 case TypeCheckKind::kClassHierarchyCheck: {
5839 // Walk over the class hierarchy to find a match.
5840 NearLabel loop, success;
5841 __ Bind(&loop);
5842 if (cls.IsRegister()) {
5843 __ cmpl(out, cls.AsRegister<CpuRegister>());
5844 } else {
5845 DCHECK(cls.IsStackSlot()) << cls;
5846 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5847 }
5848 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005849 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005850 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005851 __ testl(out, out);
5852 __ j(kNotEqual, &loop);
5853 // If `out` is null, we use it for the result, and jump to `done`.
5854 __ jmp(&done);
5855 __ Bind(&success);
5856 __ movl(out, Immediate(1));
5857 if (zero.IsLinked()) {
5858 __ jmp(&done);
5859 }
5860 break;
5861 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005862
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005863 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005864 // Do an exact check.
5865 NearLabel exact_check;
5866 if (cls.IsRegister()) {
5867 __ cmpl(out, cls.AsRegister<CpuRegister>());
5868 } else {
5869 DCHECK(cls.IsStackSlot()) << cls;
5870 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5871 }
5872 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005873 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005874 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005875 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005876 __ testl(out, out);
5877 // If `out` is null, we use it for the result, and jump to `done`.
5878 __ j(kEqual, &done);
5879 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5880 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005881 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005882 __ movl(out, Immediate(1));
5883 __ jmp(&done);
5884 break;
5885 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005886
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005887 case TypeCheckKind::kArrayCheck: {
5888 if (cls.IsRegister()) {
5889 __ cmpl(out, cls.AsRegister<CpuRegister>());
5890 } else {
5891 DCHECK(cls.IsStackSlot()) << cls;
5892 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5893 }
5894 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005895 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5896 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005897 codegen_->AddSlowPath(slow_path);
5898 __ j(kNotEqual, slow_path->GetEntryLabel());
5899 __ movl(out, Immediate(1));
5900 if (zero.IsLinked()) {
5901 __ jmp(&done);
5902 }
5903 break;
5904 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005905
Calin Juravle98893e12015-10-02 21:05:03 +01005906 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005907 case TypeCheckKind::kInterfaceCheck: {
5908 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005909 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005910 // cases.
5911 //
5912 // We cannot directly call the InstanceofNonTrivial runtime
5913 // entry point without resorting to a type checking slow path
5914 // here (i.e. by calling InvokeRuntime directly), as it would
5915 // require assigning fixed registers for the inputs of this

5916 // HInstanceOf instruction (following the runtime calling
5917 // convention), which might be cluttered by the potential first
5918 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005919 //
5920 // TODO: Introduce a new runtime entry point taking the object
5921 // to test (instead of its class) as argument, and let it deal
5922 // with the read barrier issues. This will let us refactor this
5923 // case of the `switch` code as it was previously (with a direct
5924 // call to the runtime not using a type checking slow path).
5925 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005926 DCHECK(locations->OnlyCallsOnSlowPath());
5927 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5928 /* is_fatal */ false);
5929 codegen_->AddSlowPath(slow_path);
5930 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005931 if (zero.IsLinked()) {
5932 __ jmp(&done);
5933 }
5934 break;
5935 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005936 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005937
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005938 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005939 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005940 __ xorl(out, out);
5941 }
5942
5943 if (done.IsLinked()) {
5944 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005945 }
5946
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005947 if (slow_path != nullptr) {
5948 __ Bind(slow_path->GetExitLabel());
5949 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005950}
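
// Illustrative sketch, not part of the original code generator: leaving read
// barriers and slow paths aside, the kExactCheck case above is a single class
// comparison, while kAbstractClassCheck and kClassHierarchyCheck walk the
// super-class chain with cmpl/testl and conditional jumps; a null receiver
// short-circuits to the `zero` label. The hypothetical types below restate
// that reference semantics:
struct TypeSketch {
  const TypeSketch* super_class;
};

static ATTRIBUTE_UNUSED bool IsInstanceOfSketch(const TypeSketch* klass,
                                                const TypeSketch* target) {
  // The walk ends when the super-class chain is exhausted, i.e. when `out`
  // becomes null in the emitted loop above.
  for (const TypeSketch* k = klass; k != nullptr; k = k->super_class) {
    if (k == target) {
      return true;  // movl out, Immediate(1)
    }
  }
  return false;     // xorl out, out
}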
5951
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005952void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005953 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5954 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005955 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5956 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005957 case TypeCheckKind::kExactCheck:
5958 case TypeCheckKind::kAbstractClassCheck:
5959 case TypeCheckKind::kClassHierarchyCheck:
5960 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005961 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5962 LocationSummary::kCallOnSlowPath :
5963 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005964 break;
5965 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005966 case TypeCheckKind::kUnresolvedCheck:
5967 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005968 call_kind = LocationSummary::kCallOnSlowPath;
5969 break;
5970 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005971 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5972 locations->SetInAt(0, Location::RequiresRegister());
5973 locations->SetInAt(1, Location::Any());
5974 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5975 locations->AddTemp(Location::RequiresRegister());
5976 // When read barriers are enabled, we need an additional temporary
5977 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005978 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005979 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005980 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005981}
5982
5983void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005984 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005985 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005986 Location obj_loc = locations->InAt(0);
5987 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005988 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005989 Location temp_loc = locations->GetTemp(0);
5990 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005991 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005992 locations->GetTemp(1) :
5993 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005994 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5995 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5996 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5997 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005998
Roland Levillain0d5a2812015-11-13 10:07:31 +00005999 bool is_type_check_slow_path_fatal =
6000 (type_check_kind == TypeCheckKind::kExactCheck ||
6001 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
6002 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
6003 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
6004 !instruction->CanThrowIntoCatchBlock();
6005 SlowPathCode* type_check_slow_path =
6006 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
6007 is_type_check_slow_path_fatal);
6008 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006009
Roland Levillain0d5a2812015-11-13 10:07:31 +00006010 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006011 case TypeCheckKind::kExactCheck:
6012 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006013 NearLabel done;
6014 // Avoid null check if we know obj is not null.
6015 if (instruction->MustDoNullCheck()) {
6016 __ testl(obj, obj);
6017 __ j(kEqual, &done);
6018 }
6019
6020 // /* HeapReference<Class> */ temp = obj->klass_
6021 GenerateReferenceLoadTwoRegisters(
6022 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6023
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006024 if (cls.IsRegister()) {
6025 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6026 } else {
6027 DCHECK(cls.IsStackSlot()) << cls;
6028 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6029 }
6030 // Jump to slow path for throwing the exception or doing a
6031 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006032 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006033 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006034 break;
6035 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006036
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006037 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006038 NearLabel done;
6039 // Avoid null check if we know obj is not null.
6040 if (instruction->MustDoNullCheck()) {
6041 __ testl(obj, obj);
6042 __ j(kEqual, &done);
6043 }
6044
6045 // /* HeapReference<Class> */ temp = obj->klass_
6046 GenerateReferenceLoadTwoRegisters(
6047 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6048
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006049 // If the class is abstract, we eagerly fetch the super class of the
6050 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006051 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006052 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006053 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006054 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006055
6056 // If the class reference currently in `temp` is not null, jump
6057 // to the `compare_classes` label to compare it with the checked
6058 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006059 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006060 __ j(kNotEqual, &compare_classes);
6061 // Otherwise, jump to the slow path to throw the exception.
6062 //
6063 // But before, move back the object's class into `temp` before
6064 // going into the slow path, as it has been overwritten in the
6065 // meantime.
6066 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006067 GenerateReferenceLoadTwoRegisters(
6068 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006069 __ jmp(type_check_slow_path->GetEntryLabel());
6070
6071 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006072 if (cls.IsRegister()) {
6073 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6074 } else {
6075 DCHECK(cls.IsStackSlot()) << cls;
6076 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6077 }
6078 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00006079 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006080 break;
6081 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006082
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006083 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006084 NearLabel done;
6085 // Avoid null check if we know obj is not null.
6086 if (instruction->MustDoNullCheck()) {
6087 __ testl(obj, obj);
6088 __ j(kEqual, &done);
6089 }
6090
6091 // /* HeapReference<Class> */ temp = obj->klass_
6092 GenerateReferenceLoadTwoRegisters(
6093 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6094
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006095 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006096 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006097 __ Bind(&loop);
6098 if (cls.IsRegister()) {
6099 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6100 } else {
6101 DCHECK(cls.IsStackSlot()) << cls;
6102 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6103 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006104 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006105
Roland Levillain0d5a2812015-11-13 10:07:31 +00006106 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006107 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006108
6109 // If the class reference currently in `temp` is not null, jump
6110 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006111 __ testl(temp, temp);
6112 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006113 // Otherwise, jump to the slow path to throw the exception.
6114 //
6115 // But before, move back the object's class into `temp` before
6116 // going into the slow path, as it has been overwritten in the
6117 // meantime.
6118 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006119 GenerateReferenceLoadTwoRegisters(
6120 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006121 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006122 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006123 break;
6124 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006125
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006126 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006127 // We cannot use a NearLabel here, as its range might be too
6128 // short in some cases when read barriers are enabled. This has
6129 // been observed for instance when the code emitted for this
6130 // case uses high x86-64 registers (R8-R15).
6131 Label done;
6132 // Avoid null check if we know obj is not null.
6133 if (instruction->MustDoNullCheck()) {
6134 __ testl(obj, obj);
6135 __ j(kEqual, &done);
6136 }
6137
6138 // /* HeapReference<Class> */ temp = obj->klass_
6139 GenerateReferenceLoadTwoRegisters(
6140 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6141
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006142 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006143 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006144 if (cls.IsRegister()) {
6145 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6146 } else {
6147 DCHECK(cls.IsStackSlot()) << cls;
6148 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6149 }
6150 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006151
6152 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006153 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006154 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006155
6156 // If the component type is not null (i.e. the object is indeed
6157 // an array), jump to label `check_non_primitive_component_type`
6158 // to further check that this component type is not a primitive
6159 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006160 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006161 __ j(kNotEqual, &check_non_primitive_component_type);
6162 // Otherwise, jump to the slow path to throw the exception.
6163 //
6164 // But before, move back the object's class into `temp` before
6165 // going into the slow path, as it has been overwritten in the
6166 // meantime.
6167 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006168 GenerateReferenceLoadTwoRegisters(
6169 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006170 __ jmp(type_check_slow_path->GetEntryLabel());
6171
6172 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006173 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00006174 __ j(kEqual, &done);
6175 // Same comment as above regarding `temp` and the slow path.
6176 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006177 GenerateReferenceLoadTwoRegisters(
6178 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006179 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006180 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006181 break;
6182 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006183
Calin Juravle98893e12015-10-02 21:05:03 +01006184 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006185 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00006186 NearLabel done;
6187 // Avoid null check if we know obj is not null.
6188 if (instruction->MustDoNullCheck()) {
6189 __ testl(obj, obj);
6190 __ j(kEqual, &done);
6191 }
6192
6193 // /* HeapReference<Class> */ temp = obj->klass_
6194 GenerateReferenceLoadTwoRegisters(
6195 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6196
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006197 // We always go into the type check slow path for the unresolved
6198 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006199 //
6200 // We cannot directly call the CheckCast runtime entry point
6201 // without resorting to a type checking slow path here (i.e. by
6202 // calling InvokeRuntime directly), as it would require to
6203 // assign fixed registers for the inputs of this HInstanceOf
6204 // instruction (following the runtime calling convention), which
6205 // might be cluttered by the potential first read barrier
6206 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006207 //
6208 // TODO: Introduce a new runtime entry point taking the object
6209 // to test (instead of its class) as argument, and let it deal
6210 // with the read barrier issues. This will let us refactor this
6211 // case of the `switch` code as it was previously (with a direct
6212 // call to the runtime not using a type checking slow path).
6213 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006214 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006215 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006216 break;
6217 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006218
Roland Levillain0d5a2812015-11-13 10:07:31 +00006219 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006220}
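
// Illustrative sketch, not part of the original code generator: a check-cast
// performs the same class tests as instance-of, but a failed test transfers to
// TypeCheckSlowPathX86_64, which throws, instead of producing a boolean; a
// null reference always passes (the MustDoNullCheck() fast exit above). With
// the hypothetical IsInstanceOfSketch() helper from above, where `klass` is
// the object's class or nullptr for a null reference:
static ATTRIBUTE_UNUSED void CheckCastSketch(const TypeSketch* klass,
                                             const TypeSketch* target,
                                             void (*throw_class_cast_exception)()) {
  if (klass != nullptr && !IsInstanceOfSketch(klass, target)) {
    throw_class_cast_exception();  // __ jmp(type_check_slow_path->GetEntryLabel())
  }
}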
6221
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006222void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6223 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006224 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006225 InvokeRuntimeCallingConvention calling_convention;
6226 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6227}
6228
6229void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01006230 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
6231 : QUICK_ENTRY_POINT(pUnlockObject),
6232 instruction,
6233 instruction->GetDexPc(),
6234 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006235 if (instruction->IsEnter()) {
6236 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6237 } else {
6238 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6239 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006240}
6241
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006242void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6243void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6244void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6245
6246void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6247 LocationSummary* locations =
6248 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6249 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6250 || instruction->GetResultType() == Primitive::kPrimLong);
6251 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006252 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006253 locations->SetOut(Location::SameAsFirstInput());
6254}
6255
6256void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6257 HandleBitwiseOperation(instruction);
6258}
6259
6260void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6261 HandleBitwiseOperation(instruction);
6262}
6263
6264void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6265 HandleBitwiseOperation(instruction);
6266}
6267
6268void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6269 LocationSummary* locations = instruction->GetLocations();
6270 Location first = locations->InAt(0);
6271 Location second = locations->InAt(1);
6272 DCHECK(first.Equals(locations->Out()));
6273
6274 if (instruction->GetResultType() == Primitive::kPrimInt) {
6275 if (second.IsRegister()) {
6276 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006277 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006278 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006279 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006280 } else {
6281 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006282 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006283 }
6284 } else if (second.IsConstant()) {
6285 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6286 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006287 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006288 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006289 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006290 } else {
6291 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006292 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006293 }
6294 } else {
6295 Address address(CpuRegister(RSP), second.GetStackIndex());
6296 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006297 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006298 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006299 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006300 } else {
6301 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006302 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006303 }
6304 }
6305 } else {
6306 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006307 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6308 bool second_is_constant = false;
6309 int64_t value = 0;
6310 if (second.IsConstant()) {
6311 second_is_constant = true;
6312 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006313 }
Mark Mendell40741f32015-04-20 22:10:34 -04006314 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006315
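    // The quadword forms of and/or/xor only accept a sign-extended 32-bit
    // immediate, so constants outside that range are read from the
    // RIP-relative constant area (LiteralInt64Address) instead.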
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006316 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006317 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006318 if (is_int32_value) {
6319 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6320 } else {
6321 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6322 }
6323 } else if (second.IsDoubleStackSlot()) {
6324 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006325 } else {
6326 __ andq(first_reg, second.AsRegister<CpuRegister>());
6327 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006328 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006329 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006330 if (is_int32_value) {
6331 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6332 } else {
6333 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6334 }
6335 } else if (second.IsDoubleStackSlot()) {
6336 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006337 } else {
6338 __ orq(first_reg, second.AsRegister<CpuRegister>());
6339 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006340 } else {
6341 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006342 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006343 if (is_int32_value) {
6344 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6345 } else {
6346 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6347 }
6348 } else if (second.IsDoubleStackSlot()) {
6349 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006350 } else {
6351 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6352 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006353 }
6354 }
6355}
6356
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006357void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6358 Location out,
6359 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006360 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006361 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6362 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006363 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006364 if (kUseBakerReadBarrier) {
6365 // Load with fast path based Baker's read barrier.
6366 // /* HeapReference<Object> */ out = *(out + offset)
6367 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006368 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006369 } else {
6370 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006371 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006372 // in the following move operation, as we will need it for the
6373 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006374 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006375 // /* HeapReference<Object> */ out = *(out + offset)
6376 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006377 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006378 }
6379 } else {
6380 // Plain load with no read barrier.
6381 // /* HeapReference<Object> */ out = *(out + offset)
6382 __ movl(out_reg, Address(out_reg, offset));
6383 __ MaybeUnpoisonHeapReference(out_reg);
6384 }
6385}
6386
6387void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6388 Location out,
6389 Location obj,
6390 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006391 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006392 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6393 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6394 if (kEmitCompilerReadBarrier) {
6395 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006396 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006397 // Load with fast path based Baker's read barrier.
6398 // /* HeapReference<Object> */ out = *(obj + offset)
6399 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006400 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006401 } else {
6402 // Load with slow path based read barrier.
6403 // /* HeapReference<Object> */ out = *(obj + offset)
6404 __ movl(out_reg, Address(obj_reg, offset));
6405 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6406 }
6407 } else {
6408 // Plain load with no read barrier.
6409 // /* HeapReference<Object> */ out = *(obj + offset)
6410 __ movl(out_reg, Address(obj_reg, offset));
6411 __ MaybeUnpoisonHeapReference(out_reg);
6412 }
6413}
6414
6415void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6416 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006417 const Address& address,
6418 Label* fixup_label) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006419 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6420 if (kEmitCompilerReadBarrier) {
6421 if (kUseBakerReadBarrier) {
6422 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6423 // Baker's read barriers are used:
6424 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006425 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006426 // if (Thread::Current()->GetIsGcMarking()) {
6427 // root = ReadBarrier::Mark(root)
6428 // }
6429
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006430 // /* GcRoot<mirror::Object> */ root = *address
6431 __ movl(root_reg, address);
6432 if (fixup_label != nullptr) {
6433 __ Bind(fixup_label);
6434 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006435 static_assert(
6436 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6437 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6438 "have different sizes.");
6439 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6440 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6441 "have different sizes.");
6442
6443 // Slow path used to mark the GC root `root`.
6444 SlowPathCode* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01006445 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006446 codegen_->AddSlowPath(slow_path);
6447
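      // The gs segment register points at the current Thread on x86-64, so
      // this compares Thread::Current()->GetIsGcMarking() against zero without
      // materializing the thread pointer in a general purpose register.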
Andreas Gampe542451c2016-07-26 09:02:02 -07006448 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006449 /* no_rip */ true),
6450 Immediate(0));
6451 __ j(kNotEqual, slow_path->GetEntryLabel());
6452 __ Bind(slow_path->GetExitLabel());
6453 } else {
6454 // GC root loaded through a slow path for read barriers other
6455 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006456 // /* GcRoot<mirror::Object>* */ root = address
6457 __ leaq(root_reg, address);
6458 if (fixup_label != nullptr) {
6459 __ Bind(fixup_label);
6460 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006461 // /* mirror::Object* */ root = root->Read()
6462 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6463 }
6464 } else {
6465 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006466 // /* GcRoot<mirror::Object> */ root = *address
6467 __ movl(root_reg, address);
6468 if (fixup_label != nullptr) {
6469 __ Bind(fixup_label);
6470 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006471 // Note that GC roots are not affected by heap poisoning, thus we
6472 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006473 }
6474}
6475
6476void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6477 Location ref,
6478 CpuRegister obj,
6479 uint32_t offset,
6480 Location temp,
6481 bool needs_null_check) {
6482 DCHECK(kEmitCompilerReadBarrier);
6483 DCHECK(kUseBakerReadBarrier);
6484
6485 // /* HeapReference<Object> */ ref = *(obj + offset)
6486 Address src(obj, offset);
6487 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6488}
6489
6490void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6491 Location ref,
6492 CpuRegister obj,
6493 uint32_t data_offset,
6494 Location index,
6495 Location temp,
6496 bool needs_null_check) {
6497 DCHECK(kEmitCompilerReadBarrier);
6498 DCHECK(kUseBakerReadBarrier);
6499
Roland Levillain3d312422016-06-23 13:53:42 +01006500 static_assert(
6501 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6502 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006503 // /* HeapReference<Object> */ ref =
6504 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6505 Address src = index.IsConstant() ?
6506 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6507 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6508 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6509}
6510
6511void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6512 Location ref,
6513 CpuRegister obj,
6514 const Address& src,
6515 Location temp,
6516 bool needs_null_check) {
6517 DCHECK(kEmitCompilerReadBarrier);
6518 DCHECK(kUseBakerReadBarrier);
6519
6520 // In slow path based read barriers, the read barrier call is
6521 // inserted after the original load. However, in fast path based
6522 // Baker's read barriers, we need to perform the load of
6523 // mirror::Object::monitor_ *before* the original reference load.
6524 // This load-load ordering is required by the read barrier.
6525 // The fast path/slow path (for Baker's algorithm) should look like:
6526 //
6527 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6528 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6529 // HeapReference<Object> ref = *src; // Original reference load.
6530 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6531 // if (is_gray) {
6532 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6533 // }
6534 //
6535 // Note: the original implementation in ReadBarrier::Barrier is
6536 // slightly more complex as:
6537 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006538 // the high bits of rb_state, which are expected to be all zeroes
6539 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6540 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006541 // - it performs additional checks that we do not do here for
6542 // performance reasons.
6543
6544 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6545 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6546 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6547
6548 // /* int32_t */ monitor = obj->monitor_
6549 __ movl(temp_reg, Address(obj, monitor_offset));
6550 if (needs_null_check) {
6551 MaybeRecordImplicitNullCheck(instruction);
6552 }
6553 // /* LockWord */ lock_word = LockWord(monitor)
6554 static_assert(sizeof(LockWord) == sizeof(int32_t),
6555 "art::LockWord and int32_t have different sizes.");
6556 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6557 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6558 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6559 static_assert(
6560 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6561 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6562
6563 // Load fence to prevent load-load reordering.
6564 // Note that this is a no-op, thanks to the x86-64 memory model.
6565 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6566
6567 // The actual reference load.
6568 // /* HeapReference<Object> */ ref = *src
6569 __ movl(ref_reg, src);
6570
6571 // Object* ref = ref_addr->AsMirrorPtr()
6572 __ MaybeUnpoisonHeapReference(ref_reg);
6573
6574 // Slow path used to mark the object `ref` when it is gray.
6575 SlowPathCode* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01006576 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006577 AddSlowPath(slow_path);
6578
6579 // if (rb_state == ReadBarrier::gray_ptr_)
6580 // ref = ReadBarrier::Mark(ref);
6581 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6582 __ j(kEqual, slow_path->GetEntryLabel());
6583 __ Bind(slow_path->GetExitLabel());
6584}
6585
6586void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6587 Location out,
6588 Location ref,
6589 Location obj,
6590 uint32_t offset,
6591 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006592 DCHECK(kEmitCompilerReadBarrier);
6593
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006594 // Insert a slow path based read barrier *after* the reference load.
6595 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006596 // If heap poisoning is enabled, the unpoisoning of the loaded
6597 // reference will be carried out by the runtime within the slow
6598 // path.
6599 //
6600 // Note that `ref` currently does not get unpoisoned (when heap
6601 // poisoning is enabled), which is alright as the `ref` argument is
6602 // not used by the artReadBarrierSlow entry point.
6603 //
6604 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6605 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6606 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6607 AddSlowPath(slow_path);
6608
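  // The jump below is unconditional: unlike the Baker fast path, this form
  // always takes the slow path, which ends up calling the artReadBarrierSlow
  // entry point.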
Roland Levillain0d5a2812015-11-13 10:07:31 +00006609 __ jmp(slow_path->GetEntryLabel());
6610 __ Bind(slow_path->GetExitLabel());
6611}
6612
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006613void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6614 Location out,
6615 Location ref,
6616 Location obj,
6617 uint32_t offset,
6618 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006619 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006620 // Baker's read barriers shall be handled by the fast path
6621 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6622 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006623 // If heap poisoning is enabled, unpoisoning will be taken care of
6624 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006625 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006626 } else if (kPoisonHeapReferences) {
6627 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6628 }
6629}
6630
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006631void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6632 Location out,
6633 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006634 DCHECK(kEmitCompilerReadBarrier);
6635
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006636 // Insert a slow path based read barrier *after* the GC root load.
6637 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006638 // Note that GC roots are not affected by heap poisoning, so we do
6639 // not need to do anything special for this here.
6640 SlowPathCode* slow_path =
6641 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6642 AddSlowPath(slow_path);
6643
Roland Levillain0d5a2812015-11-13 10:07:31 +00006644 __ jmp(slow_path->GetEntryLabel());
6645 __ Bind(slow_path->GetExitLabel());
6646}
6647
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006648void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006649 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006650 LOG(FATAL) << "Unreachable";
6651}
6652
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006653void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006654 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006655 LOG(FATAL) << "Unreachable";
6656}
6657
Mark Mendellfe57faa2015-09-18 09:26:15 -04006658// Simple implementation of packed switch - generate cascaded compare/jumps.
6659void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6660 LocationSummary* locations =
6661 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6662 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006663 locations->AddTemp(Location::RequiresRegister());
6664 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006665}
6666
6667void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6668 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006669 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006670 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006671 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6672 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6673 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006674 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6675
6676 // Should we generate smaller inline compare/jumps?
6677 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6678 // Figure out the correct compare values and jump conditions.
6679 // Handle the first compare/branch as a special case because it might
6680 // jump to the default case.
6681 DCHECK_GT(num_entries, 2u);
6682 Condition first_condition;
6683 uint32_t index;
6684 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6685 if (lower_bound != 0) {
6686 first_condition = kLess;
6687 __ cmpl(value_reg_in, Immediate(lower_bound));
6688 __ j(first_condition, codegen_->GetLabelOf(default_block));
6689 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6690
6691 index = 1;
6692 } else {
6693 // Handle all the compare/jumps below.
6694 first_condition = kBelow;
6695 index = 0;
6696 }
6697
6698 // Handle the rest of the compare/jumps.
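    // Each cmpl below dispatches two consecutive cases: the first_condition
    // branch catches the value just below the compared constant (all smaller
    // values have already been excluded), and the kEqual branch catches the
    // constant itself. With a zero lower bound, the unsigned kBelow also sends
    // negative inputs past every case to the default block.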
6699 for (; index + 1 < num_entries; index += 2) {
6700 int32_t compare_to_value = lower_bound + index + 1;
6701 __ cmpl(value_reg_in, Immediate(compare_to_value));
6702 // Jump to successors[index] if value < case_value[index].
6703 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6704 // Jump to successors[index + 1] if value == case_value[index + 1].
6705 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6706 }
6707
6708 if (index != num_entries) {
6709 // There are an odd number of entries. Handle the last one.
6710 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00006711 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006712 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6713 }
6714
6715 // And the default for any other value.
6716 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6717 __ jmp(codegen_->GetLabelOf(default_block));
6718 }
6719 return;
6720 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006721
6722 // Remove the bias, if needed.
6723 Register value_reg_out = value_reg_in.AsRegister();
6724 if (lower_bound != 0) {
6725 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6726 value_reg_out = temp_reg.AsRegister();
6727 }
6728 CpuRegister value_reg(value_reg_out);
6729
6730 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006731 __ cmpl(value_reg, Immediate(num_entries - 1));
6732 __ j(kAbove, codegen_->GetLabelOf(default_block));
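  // Note that the unsigned kAbove test also rejects inputs below lower_bound:
  // after the bias is removed they wrap around to large unsigned values.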
Mark Mendellfe57faa2015-09-18 09:26:15 -04006733
Mark Mendell9c86b482015-09-18 13:36:07 -04006734 // We are in the range of the table.
6735 // Load the address of the jump table in the constant area.
6736 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006737
Mark Mendell9c86b482015-09-18 13:36:07 -04006738 // Load the (signed) offset from the jump table.
6739 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6740
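  // Each table entry is a 32-bit offset relative to the start of the table
  // (see JumpTableRIPFixup::CreateJumpTable below), i.e. the target is
  //   target = &table + table[value - lower_bound]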
6741 // Add the offset to the address of the table base.
6742 __ addq(temp_reg, base_reg);
6743
6744 // And jump.
6745 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006746}
6747
Aart Bikc5d47542016-01-27 17:00:35 -08006748void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6749 if (value == 0) {
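    // xorl is preferred over a movl of zero: it is shorter and is recognized
    // by the processor as a zeroing idiom.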
6750 __ xorl(dest, dest);
6751 } else {
6752 __ movl(dest, Immediate(value));
6753 }
6754}
6755
Mark Mendell92e83bf2015-05-07 11:25:03 -04006756void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6757 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006758 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006759 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006760 } else if (IsUint<32>(value)) {
6761 // We can use a 32-bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006762 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6763 } else {
6764 __ movq(dest, Immediate(value));
6765 }
6766}
6767
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006768void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6769 if (value == 0) {
6770 __ xorps(dest, dest);
6771 } else {
6772 __ movss(dest, LiteralInt32Address(value));
6773 }
6774}
6775
6776void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6777 if (value == 0) {
6778 __ xorpd(dest, dest);
6779 } else {
6780 __ movsd(dest, LiteralInt64Address(value));
6781 }
6782}
6783
6784void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6785 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6786}
6787
6788void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6789 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6790}
6791
Aart Bika19616e2016-02-01 18:57:58 -08006792void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6793 if (value == 0) {
6794 __ testl(dest, dest);
6795 } else {
6796 __ cmpl(dest, Immediate(value));
6797 }
6798}
6799
6800void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6801 if (IsInt<32>(value)) {
6802 if (value == 0) {
6803 __ testq(dest, dest);
6804 } else {
6805 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6806 }
6807 } else {
6808 // Value won't fit in a 32-bit immediate (cmpq has no 64-bit immediate form).
6809 __ cmpq(dest, LiteralInt64Address(value));
6810 }
6811}
6812
Mark Mendellcfa410b2015-05-25 16:02:44 -04006813void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6814 DCHECK(dest.IsDoubleStackSlot());
6815 if (IsInt<32>(value)) {
6816 // Can move directly as an int32 constant.
6817 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6818 Immediate(static_cast<int32_t>(value)));
6819 } else {
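    // A movq to memory only takes a sign-extended 32-bit immediate, so larger
    // values are first materialized in the scratch register TMP.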
6820 Load64BitValue(CpuRegister(TMP), value);
6821 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6822 }
6823}
6824
Mark Mendell9c86b482015-09-18 13:36:07 -04006825/**
6826 * Class to handle late fixup of offsets into the constant area.
6827 */
6828class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6829 public:
6830 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6831 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6832
6833 protected:
6834 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6835
6836 CodeGeneratorX86_64* codegen_;
6837
6838 private:
6839 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6840 // Patch the correct offset for the instruction. We use the address of the
6841 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6842 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6843 int32_t relative_position = constant_offset - pos;
6844
6845 // Patch in the right value.
6846 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6847 }
6848
6849 // Location in constant area that the fixup refers to.
6850 size_t offset_into_constant_area_;
6851};
6852
6853/**
6854 * Class to handle late fixup of offsets to a jump table that will be created in the
6855 * constant area.
6856 */
6857class JumpTableRIPFixup : public RIPFixup {
6858 public:
6859 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6860 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6861
6862 void CreateJumpTable() {
6863 X86_64Assembler* assembler = codegen_->GetAssembler();
6864
6865 // Ensure that the reference to the jump table has the correct offset.
6866 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6867 SetOffset(offset_in_constant_table);
6868
6869 // Compute the offset from the start of the function to this jump table.
6870 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6871
6872 // Populate the jump table with the offsets to the case targets.
6873 int32_t num_entries = switch_instr_->GetNumEntries();
6874 HBasicBlock* block = switch_instr_->GetBlock();
6875 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6876 // The value that we want is the target offset - the position of the table.
6877 for (int32_t i = 0; i < num_entries; i++) {
6878 HBasicBlock* b = successors[i];
6879 Label* l = codegen_->GetLabelOf(b);
6880 DCHECK(l->IsBound());
6881 int32_t offset_to_block = l->Position() - current_table_offset;
6882 assembler->AppendInt32(offset_to_block);
6883 }
6884 }
6885
6886 private:
6887 const HPackedSwitch* switch_instr_;
6888};
6889
Mark Mendellf55c3e02015-03-26 21:07:46 -04006890void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6891 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006892 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006893 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6894 // Align to a 4-byte boundary to reduce cache misses, as the data is 4- and 8-byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006895 assembler->Align(4, 0);
6896 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006897
6898 // Populate any jump tables.
6899 for (auto jump_table : fixups_to_jump_tables_) {
6900 jump_table->CreateJumpTable();
6901 }
6902
6903 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006904 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006905 }
6906
6907 // And finish up.
6908 CodeGenerator::Finalize(allocator);
6909}
6910
Mark Mendellf55c3e02015-03-26 21:07:46 -04006911Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6912 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6913 return Address::RIP(fixup);
6914}
6915
6916Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6917 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6918 return Address::RIP(fixup);
6919}
6920
6921Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6922 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6923 return Address::RIP(fixup);
6924}
6925
6926Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6927 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6928 return Address::RIP(fixup);
6929}
6930
Andreas Gampe85b62f22015-09-09 13:15:38 -07006931// TODO: trg as memory.
6932void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6933 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006934 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006935 return;
6936 }
6937
6938 DCHECK_NE(type, Primitive::kPrimVoid);
6939
6940 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6941 if (trg.Equals(return_loc)) {
6942 return;
6943 }
6944
6945 // Let the parallel move resolver take care of all of this.
6946 HParallelMove parallel_move(GetGraph()->GetArena());
6947 parallel_move.AddMove(return_loc, trg, type, nullptr);
6948 GetMoveResolver()->EmitNativeCode(&parallel_move);
6949}
6950
Mark Mendell9c86b482015-09-18 13:36:07 -04006951Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6952 // Create a fixup that is used both to create the jump table and to address it.
6953 JumpTableRIPFixup* table_fixup =
6954 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6955
6956 // Record the fixup so the jump table contents can be emitted in Finalize().
6957 fixups_to_jump_tables_.push_back(table_fixup);
6958 return Address::RIP(table_fixup);
6959}
6960
Mark Mendellea5af682015-10-22 17:35:49 -04006961void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6962 const Address& addr_high,
6963 int64_t v,
6964 HInstruction* instruction) {
6965 if (IsInt<32>(v)) {
6966 int32_t v_32 = v;
6967 __ movq(addr_low, Immediate(v_32));
6968 MaybeRecordImplicitNullCheck(instruction);
6969 } else {
6970 // Didn't fit in a sign-extended 32-bit immediate. Do it in two 32-bit pieces.
6971 int32_t low_v = Low32Bits(v);
6972 int32_t high_v = High32Bits(v);
6973 __ movl(addr_low, Immediate(low_v));
6974 MaybeRecordImplicitNullCheck(instruction);
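    // Only the first store needs to record the implicit null check: if it did
    // not fault, the object is known to be non-null for the high word store.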
6975 __ movl(addr_high, Immediate(high_v));
6976 }
6977}
6978
Roland Levillain4d027112015-07-01 15:41:14 +01006979#undef __
6980
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006981} // namespace x86_64
6982} // namespace art