/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions, whereas a
// jump table version generates 7 instructions plus num_entries literals. The compare/jump
// sequence therefore produces less code/data when num_entries is small.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

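// 0x400 is the C2 condition bit (bit 10) of the x87 FPU status word; the code generator tests
// it to detect an incomplete fprem result when lowering floating-point remainder.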
static constexpr int kC2ConditionMask = 0x400;

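// `__` forwards pseudo-assembly statements to the X86_64Assembler owned by the code generator,
// and QUICK_ENTRY_POINT() yields the Thread-relative offset of a quick runtime entrypoint.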
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowArrayBounds),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
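// It passes `obj` to the pReadBarrierMark entrypoint and leaves the (possibly updated) reference
// in `out`; it is only used when kEmitCompilerReadBarrier is enabled.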
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCode(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
#define __ down_cast<X86_64Assembler*>(GetAssembler())->

inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
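// These mappings assume the operands were compared with ucomiss/ucomisd, which set the flags as
// an unsigned integer comparison would; unordered (NaN) results are handled by the callers.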
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
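  // On x86-64 the GS segment register points at the current Thread, so the call below goes
  // through the entrypoint slot stored at `entry_point_offset` inside the Thread object.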
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
  RecordPcInfo(instruction, dex_pc, slow_path);
}

static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      isa_features_(isa_features),
      constant_area_start_(0),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}

InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

David Brazdil58282f42016-01-14 12:45:10 +00001027void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001028 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001029 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001030
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001031 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001032 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001033}
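// RSP is never handed out by the register allocator, and TMP is excluded so
// the code generator always has a scratch register available, e.g. for the
// stack-to-stack moves in Move() below.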
1034
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001035static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001036 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001037}
David Srbecky9d8606d2015-04-12 09:35:32 +01001038
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001039static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001040 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001041}
1042
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001043void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001044 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001045 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001046 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001047 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001048 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001049
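  // The testq below is the "implicit" stack overflow check: it loads a word
  // from well below RSP, inside the reserved guard region, so a thread about
  // to overflow faults here and the fault handler turns the fault into a
  // StackOverflowError. RecordPcInfo(nullptr, 0) provides the stack map for
  // that potential faulting PC. Leaf methods with small frames skip the probe.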
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001050 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001051 __ testq(CpuRegister(RAX), Address(
1052 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001053 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001054 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001055
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001056 if (HasEmptyFrame()) {
1057 return;
1058 }
1059
Nicolas Geoffray98893962015-01-21 12:32:32 +00001060 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001061 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001062 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001063 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001064 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1065 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001066 }
1067 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001068
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001069 int adjust = GetFrameSize() - GetCoreSpillSize();
1070 __ subq(CpuRegister(RSP), Immediate(adjust));
1071 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001072 uint32_t xmm_spill_location = GetFpuSpillStart();
1073 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001074
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001075 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1076 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001077 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1078 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1079 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001080 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001081 }
1082
Mathieu Chartiere401d142015-04-22 13:56:20 -07001083 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001084 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001085}
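// After the prologue the frame has roughly this shape (illustrative sketch;
// exact offsets depend on the spill masks and frame size chosen by the
// register allocator), from higher to lower addresses:
//   [ return address              ]   pushed by the caller's call
//   [ core callee-save pushes     ]   one pushq per allocated callee-save
//   [ XMM callee-save spills      ]   movsd slots starting at GetFpuSpillStart()
//   [ spill slots / outgoing args ]
//   [ ArtMethod*                  ]   <- RSP (kCurrentMethodStackOffset == 0)
// The CFI directives emitted after each adjustment let unwinders recover the
// CFA and the callee-saves at any point inside the prologue.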
1086
1087void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001088 __ cfi().RememberState();
1089 if (!HasEmptyFrame()) {
1090 uint32_t xmm_spill_location = GetFpuSpillStart();
1091 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1092 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1093 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1094 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1095 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1096 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1097 }
1098 }
1099
1100 int adjust = GetFrameSize() - GetCoreSpillSize();
1101 __ addq(CpuRegister(RSP), Immediate(adjust));
1102 __ cfi().AdjustCFAOffset(-adjust);
1103
1104 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1105 Register reg = kCoreCalleeSaves[i];
1106 if (allocated_registers_.ContainsCoreRegister(reg)) {
1107 __ popq(CpuRegister(reg));
1108 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1109 __ cfi().Restore(DWARFReg(reg));
1110 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001111 }
1112 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001113 __ ret();
1114 __ cfi().RestoreState();
1115 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001116}
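// The epilogue mirrors the prologue in reverse: XMM restores, stack
// re-adjustment, then popq of the core callee-saves. The RememberState /
// RestoreState pair brackets the epilogue so that CFI for any code emitted
// after the `ret` (later blocks of the same method) still describes the fully
// set-up frame, and DefCFAOffset re-establishes the frame size for it.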
1117
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001118void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1119 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001120}
1121
David Brazdil60328912016-04-04 17:47:42 +00001122Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
1123 switch (load->GetType()) {
1124 case Primitive::kPrimLong:
1125 case Primitive::kPrimDouble:
1126 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
1127
1128 case Primitive::kPrimInt:
1129 case Primitive::kPrimNot:
1130 case Primitive::kPrimFloat:
1131 return Location::StackSlot(GetStackSlot(load->GetLocal()));
1132
1133 case Primitive::kPrimBoolean:
1134 case Primitive::kPrimByte:
1135 case Primitive::kPrimChar:
1136 case Primitive::kPrimShort:
1137 case Primitive::kPrimVoid:
1138 LOG(FATAL) << "Unexpected type " << load->GetType();
1139 UNREACHABLE();
1140 }
1141
1142 LOG(FATAL) << "Unreachable";
1143 UNREACHABLE();
1144}
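// Locals take a single 32-bit stack slot except longs and doubles, which need
// a double (64-bit) slot; references also fit one slot because the managed
// heap uses 32-bit compressed references even on 64-bit targets.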
1145
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001146void CodeGeneratorX86_64::Move(Location destination, Location source) {
1147 if (source.Equals(destination)) {
1148 return;
1149 }
1150 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001151 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001152 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001153 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001154 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001155 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001156 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001157 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1158 } else if (source.IsConstant()) {
1159 HConstant* constant = source.GetConstant();
1160 if (constant->IsLongConstant()) {
1161 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1162 } else {
1163 Load32BitValue(dest, GetInt32ValueOf(constant));
1164 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001165 } else {
1166 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001167 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001168 }
1169 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001170 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001171 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001172 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001173 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001174 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1175 } else if (source.IsConstant()) {
1176 HConstant* constant = source.GetConstant();
1177 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1178 if (constant->IsFloatConstant()) {
1179 Load32BitValue(dest, static_cast<int32_t>(value));
1180 } else {
1181 Load64BitValue(dest, value);
1182 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001183 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001184 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001185 } else {
1186 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001187 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001188 }
1189 } else if (destination.IsStackSlot()) {
1190 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001191 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001192 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001193 } else if (source.IsFpuRegister()) {
1194 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001195 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001196 } else if (source.IsConstant()) {
1197 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001198 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001199 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001200 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001201 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001202 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1203 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001204 }
1205 } else {
1206 DCHECK(destination.IsDoubleStackSlot());
1207 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001208 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001209 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001210 } else if (source.IsFpuRegister()) {
1211 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001212 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001213 } else if (source.IsConstant()) {
1214 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001215 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001216 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001217 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001218 } else {
1219 DCHECK(constant->IsLongConstant());
1220 value = constant->AsLongConstant()->GetValue();
1221 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001222 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001223 } else {
1224 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001225 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1226 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001227 }
1228 }
1229}
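// Move() covers every Location pairing the register allocator can produce.
// Two x86-64 specifics are visible above: there is no memory-to-memory mov,
// so stack-to-stack moves bounce through the reserved TMP register, roughly
// (illustrative):
//   movl TMP, [RSP + src_offset]
//   movl [RSP + dst_offset], TMP
// and a 64-bit constant stored to the stack goes through
// Store64BitValueToStack(), since a mov to memory only takes a sign-extended
// 32-bit immediate.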
1230
Calin Juravle175dc732015-08-25 15:42:32 +01001231void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1232 DCHECK(location.IsRegister());
1233 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1234}
1235
Calin Juravlee460d1d2015-09-29 04:52:17 +01001236void CodeGeneratorX86_64::MoveLocation(
1237 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1238 Move(dst, src);
1239}
1240
1241void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1242 if (location.IsRegister()) {
1243 locations->AddTemp(location);
1244 } else {
1245 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1246 }
1247}
1248
David Brazdilfc6a86a2015-06-26 10:33:45 +00001249void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001250 DCHECK(!successor->IsExitBlock());
1251
1252 HBasicBlock* block = got->GetBlock();
1253 HInstruction* previous = got->GetPrevious();
1254
1255 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001256 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001257 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1258 return;
1259 }
1260
1261 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1262 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1263 }
1264 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001265 __ jmp(codegen_->GetLabelOf(successor));
1266 }
1267}
1268
David Brazdilfc6a86a2015-06-26 10:33:45 +00001269void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1270 got->SetLocations(nullptr);
1271}
1272
1273void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1274 HandleGoto(got, got->GetSuccessor());
1275}
1276
1277void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1278 try_boundary->SetLocations(nullptr);
1279}
1280
1281void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1282 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1283 if (!successor->IsExitBlock()) {
1284 HandleGoto(try_boundary, successor);
1285 }
1286}
1287
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001288void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1289 exit->SetLocations(nullptr);
1290}
1291
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001292void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001293}
1294
Mark Mendell152408f2015-12-31 12:28:50 -05001295template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001296void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001297 LabelType* true_label,
1298 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001299 if (cond->IsFPConditionTrueIfNaN()) {
1300 __ j(kUnordered, true_label);
1301 } else if (cond->IsFPConditionFalseIfNaN()) {
1302 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001303 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001304 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001305}
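// GenerateFPJumps relies on the unordered-compare semantics of ucomiss /
// ucomisd: they report the result through ZF, PF and CF, and a NaN operand
// yields the "unordered" outcome (PF = 1). The IsFPConditionTrueIfNaN() /
// IsFPConditionFalseIfNaN() bias of the condition therefore routes the
// kUnordered jump to the true or the false label before the ordered test.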
1306
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001307void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001308 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001309
Mark Mendellc4701932015-04-10 13:18:51 -04001310 Location left = locations->InAt(0);
1311 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001312 Primitive::Type type = condition->InputAt(0)->GetType();
1313 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001314 case Primitive::kPrimBoolean:
1315 case Primitive::kPrimByte:
1316 case Primitive::kPrimChar:
1317 case Primitive::kPrimShort:
1318 case Primitive::kPrimInt:
1319 case Primitive::kPrimNot: {
1320 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1321 if (right.IsConstant()) {
1322 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1323 if (value == 0) {
1324 __ testl(left_reg, left_reg);
1325 } else {
1326 __ cmpl(left_reg, Immediate(value));
1327 }
1328 } else if (right.IsStackSlot()) {
1329 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1330 } else {
1331 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1332 }
1333 break;
1334 }
Mark Mendellc4701932015-04-10 13:18:51 -04001335 case Primitive::kPrimLong: {
1336 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1337 if (right.IsConstant()) {
1338 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001339 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001340 } else if (right.IsDoubleStackSlot()) {
1341 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1342 } else {
1343 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1344 }
Mark Mendellc4701932015-04-10 13:18:51 -04001345 break;
1346 }
1347 case Primitive::kPrimFloat: {
1348 if (right.IsFpuRegister()) {
1349 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1350 } else if (right.IsConstant()) {
1351 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1352 codegen_->LiteralFloatAddress(
1353 right.GetConstant()->AsFloatConstant()->GetValue()));
1354 } else {
1355 DCHECK(right.IsStackSlot());
1356 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1357 Address(CpuRegister(RSP), right.GetStackIndex()));
1358 }
Mark Mendellc4701932015-04-10 13:18:51 -04001359 break;
1360 }
1361 case Primitive::kPrimDouble: {
1362 if (right.IsFpuRegister()) {
1363 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1364 } else if (right.IsConstant()) {
1365 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1366 codegen_->LiteralDoubleAddress(
1367 right.GetConstant()->AsDoubleConstant()->GetValue()));
1368 } else {
1369 DCHECK(right.IsDoubleStackSlot());
1370 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1371 Address(CpuRegister(RSP), right.GetStackIndex()));
1372 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001373 break;
1374 }
1375 default:
1376 LOG(FATAL) << "Unexpected condition type " << type;
1377 }
1378}
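// When the right-hand side is the constant 0, the code above emits
// `testl reg, reg` instead of `cmpl reg, Immediate(0)`: the resulting flags
// are equivalent for the conditions used here and the test encoding is
// shorter.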
1379
1380template<class LabelType>
1381void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1382 LabelType* true_target_in,
1383 LabelType* false_target_in) {
1384 // Generated branching requires both targets to be explicit. If either of the
1385 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1386 LabelType fallthrough_target;
1387 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1388 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1389
1390 // Generate the comparison to set the CC.
1391 GenerateCompareTest(condition);
1392
1393 // Now generate the correct jump(s).
1394 Primitive::Type type = condition->InputAt(0)->GetType();
1395 switch (type) {
1396 case Primitive::kPrimLong: {
1397 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1398 break;
1399 }
1400 case Primitive::kPrimFloat: {
1401 GenerateFPJumps(condition, true_target, false_target);
1402 break;
1403 }
1404 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001405 GenerateFPJumps(condition, true_target, false_target);
1406 break;
1407 }
1408 default:
1409 LOG(FATAL) << "Unexpected condition type " << type;
1410 }
1411
David Brazdil0debae72015-11-12 18:37:00 +00001412 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001413 __ jmp(false_target);
1414 }
David Brazdil0debae72015-11-12 18:37:00 +00001415
1416 if (fallthrough_target.IsLinked()) {
1417 __ Bind(&fallthrough_target);
1418 }
Mark Mendellc4701932015-04-10 13:18:51 -04001419}
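// When one of the targets is the natural fallthrough (passed as nullptr), the
// local `fallthrough_target` label gives the generated jumps something to
// bind to; it is only Bound when IsLinked() shows that a jump actually
// referenced it, so the common case adds no code.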
1420
David Brazdil0debae72015-11-12 18:37:00 +00001421static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1422 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1423 // are set only strictly before `branch`. We can't use the eflags on long
1424 // conditions if they are materialized due to the complex branching.
1425 return cond->IsCondition() &&
1426 cond->GetNext() == branch &&
1427 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1428}
1429
Mark Mendell152408f2015-12-31 12:28:50 -05001430template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001431void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001432 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001433 LabelType* true_target,
1434 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001435 HInstruction* cond = instruction->InputAt(condition_input_index);
1436
1437 if (true_target == nullptr && false_target == nullptr) {
1438 // Nothing to do. The code always falls through.
1439 return;
1440 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001441 // Constant condition, statically compared against "true" (integer value 1).
1442 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001443 if (true_target != nullptr) {
1444 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001445 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001446 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001447 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001448 if (false_target != nullptr) {
1449 __ jmp(false_target);
1450 }
1451 }
1452 return;
1453 }
1454
1455 // The following code generates these patterns:
1456 // (1) true_target == nullptr && false_target != nullptr
1457 // - opposite condition true => branch to false_target
1458 // (2) true_target != nullptr && false_target == nullptr
1459 // - condition true => branch to true_target
1460 // (3) true_target != nullptr && false_target != nullptr
1461 // - condition true => branch to true_target
1462 // - branch to false_target
1463 if (IsBooleanValueOrMaterializedCondition(cond)) {
1464 if (AreEflagsSetFrom(cond, instruction)) {
1465 if (true_target == nullptr) {
1466 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1467 } else {
1468 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1469 }
1470 } else {
1471 // Materialized condition, compare against 0.
1472 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1473 if (lhs.IsRegister()) {
1474 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1475 } else {
1476 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1477 }
1478 if (true_target == nullptr) {
1479 __ j(kEqual, false_target);
1480 } else {
1481 __ j(kNotEqual, true_target);
1482 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001483 }
1484 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001485 // Condition has not been materialized, use its inputs as the
1486 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001487 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001488
David Brazdil0debae72015-11-12 18:37:00 +00001489 // If this is a long or FP comparison that has been folded into
1490 // the HCondition, generate the comparison directly.
1491 Primitive::Type type = condition->InputAt(0)->GetType();
1492 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1493 GenerateCompareTestAndBranch(condition, true_target, false_target);
1494 return;
1495 }
1496
1497 Location lhs = condition->GetLocations()->InAt(0);
1498 Location rhs = condition->GetLocations()->InAt(1);
1499 if (rhs.IsRegister()) {
1500 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1501 } else if (rhs.IsConstant()) {
1502 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001503 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001504 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001505 __ cmpl(lhs.AsRegister<CpuRegister>(),
1506 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1507 }
1508 if (true_target == nullptr) {
1509 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1510 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001511 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001512 }
Dave Allison20dfc792014-06-16 20:44:29 -07001513 }
David Brazdil0debae72015-11-12 18:37:00 +00001514
1515 // If neither branch falls through (case 3), the conditional branch to `true_target`
1516 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1517 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001518 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001519 }
1520}
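// As a concrete example of the cases documented above (illustrative only):
// for `if (a < b)` with a non-materialized condition and the false successor
// being the next block, case (2) applies and a single `jl true_target` is
// emitted after the cmpl; if neither successor is the fallthrough block
// (case 3), the same conditional jump is followed by `jmp false_target`.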
1521
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001522void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001523 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1524 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001525 locations->SetInAt(0, Location::Any());
1526 }
1527}
1528
1529void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001530 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1531 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1532 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1533 nullptr : codegen_->GetLabelOf(true_successor);
1534 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1535 nullptr : codegen_->GetLabelOf(false_successor);
1536 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001537}
1538
1539void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1540 LocationSummary* locations = new (GetGraph()->GetArena())
1541 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001542 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001543 locations->SetInAt(0, Location::Any());
1544 }
1545}
1546
1547void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001548 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001549 GenerateTestAndBranch<Label>(deoptimize,
1550 /* condition_input_index */ 0,
1551 slow_path->GetEntryLabel(),
1552 /* false_target */ nullptr);
1553}
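// A deoptimization guard reuses the generic test-and-branch machinery: when
// the guard condition holds, control jumps to DeoptimizationSlowPathX86_64,
// which calls into the runtime so execution can continue in the interpreter.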
1554
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001555static bool SelectCanUseCMOV(HSelect* select) {
1556 // There are no conditional move instructions for XMMs.
1557 if (Primitive::IsFloatingPointType(select->GetType())) {
1558 return false;
1559 }
1560
1561 // A FP condition doesn't generate the single CC that we need.
1562 HInstruction* condition = select->GetCondition();
1563 if (condition->IsCondition() &&
1564 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1565 return false;
1566 }
1567
1568 // We can generate a CMOV for this Select.
1569 return true;
1570}
1571
David Brazdil74eb1b22015-12-14 11:44:01 +00001572void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1573 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1574 if (Primitive::IsFloatingPointType(select->GetType())) {
1575 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001576 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001577 } else {
1578 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001579 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001580 if (select->InputAt(1)->IsConstant()) {
1581 locations->SetInAt(1, Location::RequiresRegister());
1582 } else {
1583 locations->SetInAt(1, Location::Any());
1584 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001585 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001586 locations->SetInAt(1, Location::Any());
1587 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001588 }
1589 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1590 locations->SetInAt(2, Location::RequiresRegister());
1591 }
1592 locations->SetOut(Location::SameAsFirstInput());
1593}
1594
1595void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1596 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001597 if (SelectCanUseCMOV(select)) {
1598 // If both the condition and the source types are integer, we can generate
1599 // a CMOV to implement Select.
1600 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001601 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001602 DCHECK(locations->InAt(0).Equals(locations->Out()));
1603
1604 HInstruction* select_condition = select->GetCondition();
1605 Condition cond = kNotEqual;
1606
1607 // Figure out how to test the 'condition'.
1608 if (select_condition->IsCondition()) {
1609 HCondition* condition = select_condition->AsCondition();
1610 if (!condition->IsEmittedAtUseSite()) {
1611 // This was a previously materialized condition.
1612 // Can we use the existing condition code?
1613 if (AreEflagsSetFrom(condition, select)) {
1614 // Materialization was the previous instruction. Condition codes are right.
1615 cond = X86_64IntegerCondition(condition->GetCondition());
1616 } else {
1617 // No, we have to recreate the condition code.
1618 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1619 __ testl(cond_reg, cond_reg);
1620 }
1621 } else {
1622 GenerateCompareTest(condition);
1623 cond = X86_64IntegerCondition(condition->GetCondition());
1624 }
1625 } else {
1626 // Must be a boolean condition, which needs to be compared to 0.
1627 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1628 __ testl(cond_reg, cond_reg);
1629 }
1630
1631 // If the condition is true, overwrite the output, which already contains false.
1632 // Generate the correct sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001633 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1634 if (value_true_loc.IsRegister()) {
1635 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1636 } else {
1637 __ cmov(cond,
1638 value_false,
1639 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1640 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001641 } else {
1642 NearLabel false_target;
1643 GenerateTestAndBranch<NearLabel>(select,
1644 /* condition_input_index */ 2,
1645 /* true_target */ nullptr,
1646 &false_target);
1647 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1648 __ Bind(&false_target);
1649 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001650}
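// The CMOV path exploits the SameAsFirstInput output constraint: the output
// register already holds the "false" value, and a single
//   cmovcc out, true_value
// overwrites it only when the condition holds. Because cmov has no immediate
// form, a constant "true" input was forced into a register by VisitSelect's
// location setup above.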
1651
David Srbecky0cf44932015-12-09 14:09:59 +00001652void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1653 new (GetGraph()->GetArena()) LocationSummary(info);
1654}
1655
David Srbeckyd28f4a02016-03-14 17:14:24 +00001656void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1657 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001658}
1659
1660void CodeGeneratorX86_64::GenerateNop() {
1661 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001662}
1663
David Brazdil60328912016-04-04 17:47:42 +00001664void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
1665 local->SetLocations(nullptr);
1666}
1667
1668void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
1669 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1670}
1671
1672void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
1673 local->SetLocations(nullptr);
1674}
1675
1676void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
1677 // Nothing to do, this is driven by the code generator.
1678}
1679
1680void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
1681 LocationSummary* locations =
1682 new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1683 switch (store->InputAt(1)->GetType()) {
1684 case Primitive::kPrimBoolean:
1685 case Primitive::kPrimByte:
1686 case Primitive::kPrimChar:
1687 case Primitive::kPrimShort:
1688 case Primitive::kPrimInt:
1689 case Primitive::kPrimNot:
1690 case Primitive::kPrimFloat:
1691 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1692 break;
1693
1694 case Primitive::kPrimLong:
1695 case Primitive::kPrimDouble:
1696 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1697 break;
1698
1699 default:
1700 LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1701 }
1702}
1703
1704void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
1705}
1706
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001707void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001708 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001709 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001710 // Handle the long/FP comparisons made in instruction simplification.
1711 switch (cond->InputAt(0)->GetType()) {
1712 case Primitive::kPrimLong:
1713 locations->SetInAt(0, Location::RequiresRegister());
1714 locations->SetInAt(1, Location::Any());
1715 break;
1716 case Primitive::kPrimFloat:
1717 case Primitive::kPrimDouble:
1718 locations->SetInAt(0, Location::RequiresFpuRegister());
1719 locations->SetInAt(1, Location::Any());
1720 break;
1721 default:
1722 locations->SetInAt(0, Location::RequiresRegister());
1723 locations->SetInAt(1, Location::Any());
1724 break;
1725 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001726 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001727 locations->SetOut(Location::RequiresRegister());
1728 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001729}
1730
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001731void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001732 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001733 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001734 }
Mark Mendellc4701932015-04-10 13:18:51 -04001735
1736 LocationSummary* locations = cond->GetLocations();
1737 Location lhs = locations->InAt(0);
1738 Location rhs = locations->InAt(1);
1739 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001740 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001741
1742 switch (cond->InputAt(0)->GetType()) {
1743 default:
1744 // Integer case.
1745
1746 // Clear output register: setcc only sets the low byte.
1747 __ xorl(reg, reg);
1748
1749 if (rhs.IsRegister()) {
1750 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1751 } else if (rhs.IsConstant()) {
1752 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001753 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001754 } else {
1755 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1756 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001757 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001758 return;
1759 case Primitive::kPrimLong:
1760 // Clear output register: setcc only sets the low byte.
1761 __ xorl(reg, reg);
1762
1763 if (rhs.IsRegister()) {
1764 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1765 } else if (rhs.IsConstant()) {
1766 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001767 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001768 } else {
1769 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1770 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001771 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001772 return;
1773 case Primitive::kPrimFloat: {
1774 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1775 if (rhs.IsConstant()) {
1776 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1777 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1778 } else if (rhs.IsStackSlot()) {
1779 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1780 } else {
1781 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1782 }
1783 GenerateFPJumps(cond, &true_label, &false_label);
1784 break;
1785 }
1786 case Primitive::kPrimDouble: {
1787 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1788 if (rhs.IsConstant()) {
1789 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1790 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1791 } else if (rhs.IsDoubleStackSlot()) {
1792 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1793 } else {
1794 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1795 }
1796 GenerateFPJumps(cond, &true_label, &false_label);
1797 break;
1798 }
1799 }
1800
1801 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001802 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001803
Roland Levillain4fa13f62015-07-06 18:11:54 +01001804 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001805 __ Bind(&false_label);
1806 __ xorl(reg, reg);
1807 __ jmp(&done_label);
1808
Roland Levillain4fa13f62015-07-06 18:11:54 +01001809 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001810 __ Bind(&true_label);
1811 __ movl(reg, Immediate(1));
1812 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001813}
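// Two ordering details in the integer cases above: the output register is
// cleared with xorl *before* the comparison because xorl itself clobbers the
// flags, and the clearing is needed because setcc only writes the low byte.
// The FP cases instead materialize the result via the true/false labels,
// reusing the NaN-aware jump routing of GenerateFPJumps.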
1814
1815void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001816 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001817}
1818
1819void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001820 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001821}
1822
1823void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001824 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001825}
1826
1827void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001828 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001829}
1830
1831void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001832 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001833}
1834
1835void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001836 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001837}
1838
1839void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001840 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001841}
1842
1843void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001844 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001845}
1846
1847void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001848 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001849}
1850
1851void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001852 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001853}
1854
1855void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001856 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001857}
1858
1859void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001860 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001861}
1862
Aart Bike9f37602015-10-09 11:15:55 -07001863void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001864 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001865}
1866
1867void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001868 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001869}
1870
1871void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001872 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001873}
1874
1875void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001876 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001877}
1878
1879void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001880 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001881}
1882
1883void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001884 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001885}
1886
1887void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001888 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001889}
1890
1891void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001892 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001893}
1894
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001895void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001896 LocationSummary* locations =
1897 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001898 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001899 case Primitive::kPrimBoolean:
1900 case Primitive::kPrimByte:
1901 case Primitive::kPrimShort:
1902 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001903 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001904 case Primitive::kPrimLong: {
1905 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001906 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001907 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1908 break;
1909 }
1910 case Primitive::kPrimFloat:
1911 case Primitive::kPrimDouble: {
1912 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001913 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001914 locations->SetOut(Location::RequiresRegister());
1915 break;
1916 }
1917 default:
1918 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1919 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001920}
1921
1922void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001923 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001924 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001925 Location left = locations->InAt(0);
1926 Location right = locations->InAt(1);
1927
Mark Mendell0c9497d2015-08-21 09:30:05 -04001928 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001929 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001930 Condition less_cond = kLess;
1931
Calin Juravleddb7df22014-11-25 20:56:51 +00001932 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001933 case Primitive::kPrimBoolean:
1934 case Primitive::kPrimByte:
1935 case Primitive::kPrimShort:
1936 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001937 case Primitive::kPrimInt: {
1938 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1939 if (right.IsConstant()) {
1940 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1941 codegen_->Compare32BitValue(left_reg, value);
1942 } else if (right.IsStackSlot()) {
1943 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1944 } else {
1945 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1946 }
1947 break;
1948 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001949 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001950 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1951 if (right.IsConstant()) {
1952 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001953 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001954 } else if (right.IsDoubleStackSlot()) {
1955 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001956 } else {
1957 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1958 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001959 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001960 }
1961 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001962 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1963 if (right.IsConstant()) {
1964 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1965 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1966 } else if (right.IsStackSlot()) {
1967 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1968 } else {
1969 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1970 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001971 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001972 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001973 break;
1974 }
1975 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001976 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1977 if (right.IsConstant()) {
1978 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1979 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1980 } else if (right.IsDoubleStackSlot()) {
1981 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1982 } else {
1983 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1984 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001985 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001986 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001987 break;
1988 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001989 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001990 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001991 }
Aart Bika19616e2016-02-01 18:57:58 -08001992
Calin Juravleddb7df22014-11-25 20:56:51 +00001993 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001994 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001995 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001996
Calin Juravle91debbc2014-11-26 19:01:09 +00001997 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001998 __ movl(out, Immediate(1));
1999 __ jmp(&done);
2000
2001 __ Bind(&less);
2002 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002003
2004 __ Bind(&done);
2005}
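// HCompare produces the usual -1 / 0 / 1 three-way result. The FP cases test
// kBelow rather than kLess because ucomiss/ucomisd report through CF/ZF
// (unsigned-style flags) rather than SF/OF, and the gt-bias decides whether
// an unordered (NaN) input is classified as greater or as less before the
// ordered tests run.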
2006
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002007void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002008 LocationSummary* locations =
2009 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002010 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002011}
2012
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002013void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002014 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002015}
2016
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002017void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
2018 LocationSummary* locations =
2019 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2020 locations->SetOut(Location::ConstantLocation(constant));
2021}
2022
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002023void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002024 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002025}
2026
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002027void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002028 LocationSummary* locations =
2029 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002030 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002031}
2032
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002033void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002034 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002035}
2036
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002037void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2038 LocationSummary* locations =
2039 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2040 locations->SetOut(Location::ConstantLocation(constant));
2041}
2042
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002043void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002044 // Will be generated at use site.
2045}
2046
2047void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2048 LocationSummary* locations =
2049 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2050 locations->SetOut(Location::ConstantLocation(constant));
2051}
2052
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002053void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
2054 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002055 // Will be generated at use site.
2056}
2057
Calin Juravle27df7582015-04-17 19:12:31 +01002058void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2059 memory_barrier->SetLocations(nullptr);
2060}
2061
2062void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002063 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002064}
2065
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002066void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2067 ret->SetLocations(nullptr);
2068}
2069
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002070void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002071 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002072}
2073
2074void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002075 LocationSummary* locations =
2076 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002077 switch (ret->InputAt(0)->GetType()) {
2078 case Primitive::kPrimBoolean:
2079 case Primitive::kPrimByte:
2080 case Primitive::kPrimChar:
2081 case Primitive::kPrimShort:
2082 case Primitive::kPrimInt:
2083 case Primitive::kPrimNot:
2084 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002085 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002086 break;
2087
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002088 case Primitive::kPrimFloat:
2089 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002090 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002091 break;
2092
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002093 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002094 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002095 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002096}
2097
2098void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2099 if (kIsDebugBuild) {
2100 switch (ret->InputAt(0)->GetType()) {
2101 case Primitive::kPrimBoolean:
2102 case Primitive::kPrimByte:
2103 case Primitive::kPrimChar:
2104 case Primitive::kPrimShort:
2105 case Primitive::kPrimInt:
2106 case Primitive::kPrimNot:
2107 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002108 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002109 break;
2110
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002111 case Primitive::kPrimFloat:
2112 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002113 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002114 XMM0);
2115 break;
2116
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002117 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002118 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002119 }
2120 }
2121 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002122}
2123
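// Integral and reference results are returned in RAX, floating-point results in XMM0.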
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002124Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2125 switch (type) {
2126 case Primitive::kPrimBoolean:
2127 case Primitive::kPrimByte:
2128 case Primitive::kPrimChar:
2129 case Primitive::kPrimShort:
2130 case Primitive::kPrimInt:
2131 case Primitive::kPrimNot:
2132 case Primitive::kPrimLong:
2133 return Location::RegisterLocation(RAX);
2134
2135 case Primitive::kPrimVoid:
2136 return Location::NoLocation();
2137
2138 case Primitive::kPrimDouble:
2139 case Primitive::kPrimFloat:
2140 return Location::FpuRegisterLocation(XMM0);
2141 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002142
2143 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002144}
2145
2146Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2147 return Location::RegisterLocation(kMethodRegisterArgument);
2148}
2149
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002150Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002151 switch (type) {
2152 case Primitive::kPrimBoolean:
2153 case Primitive::kPrimByte:
2154 case Primitive::kPrimChar:
2155 case Primitive::kPrimShort:
2156 case Primitive::kPrimInt:
2157 case Primitive::kPrimNot: {
2158 uint32_t index = gp_index_++;
2159 stack_index_++;
2160 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002161 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002162 } else {
2163 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2164 }
2165 }
2166
2167 case Primitive::kPrimLong: {
2168 uint32_t index = gp_index_;
2169 stack_index_ += 2;
2170 if (index < calling_convention.GetNumberOfRegisters()) {
2171 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002172 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002173 } else {
2174 gp_index_ += 2;
2175 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2176 }
2177 }
2178
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002179 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002180 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002181 stack_index_++;
2182 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002183 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002184 } else {
2185 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2186 }
2187 }
2188
2189 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002190 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002191 stack_index_ += 2;
2192 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002193 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002194 } else {
2195 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2196 }
2197 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002198
2199 case Primitive::kPrimVoid:
2200 LOG(FATAL) << "Unexpected parameter type " << type;
2201 break;
2202 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002203 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002204}
2205
Calin Juravle175dc732015-08-25 15:42:32 +01002206void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2207 // The trampoline uses the dex calling convention, except that instead of
2208 // loading arg0/r0 with the target Method*, arg0/r0 will contain the
2209 // method_idx.
2210 HandleInvoke(invoke);
2211}
2212
2213void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2214 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2215}
2216
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002217void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002218 // Explicit clinit checks triggered by static invokes must have been pruned by
2219 // art::PrepareForRegisterAllocation.
2220 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002221
Mark Mendellfb8d2792015-03-31 22:16:59 -04002222 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002223 if (intrinsic.TryDispatch(invoke)) {
2224 return;
2225 }
2226
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002227 HandleInvoke(invoke);
2228}
2229
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002230static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2231 if (invoke->GetLocations()->Intrinsified()) {
2232 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2233 intrinsic.Dispatch(invoke);
2234 return true;
2235 }
2236 return false;
2237}
2238
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002239void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002240 // Explicit clinit checks triggered by static invokes must have been pruned by
2241 // art::PrepareForRegisterAllocation.
2242 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002243
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002244 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2245 return;
2246 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002247
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002248 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002249 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002250 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002251 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002252}
2253
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002254void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002255 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002256 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002257}
2258
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002259void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002260 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002261 if (intrinsic.TryDispatch(invoke)) {
2262 return;
2263 }
2264
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002265 HandleInvoke(invoke);
2266}
2267
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002268void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002269 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2270 return;
2271 }
2272
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002273 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002274 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002275 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002276}
2277
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002278void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2279 HandleInvoke(invoke);
2280 // Add the hidden argument.
2281 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2282}
2283
2284void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2285 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002286 LocationSummary* locations = invoke->GetLocations();
2287 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2288 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Mathieu Chartiere401d142015-04-22 13:56:20 -07002289 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
2290 invoke->GetImtIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002291 Location receiver = locations->InAt(0);
2292 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2293
Roland Levillain0d5a2812015-11-13 10:07:31 +00002294 // Set the hidden argument. This is safe to do here, as RAX
2295 // won't be modified thereafter, before the `call` instruction.
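// The hidden argument holds the dex method index of the interface method, which
// lets the IMT conflict trampoline identify the intended callee when several
// interface methods map to the same IMT slot.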
2296 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002297 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002298
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002299 if (receiver.IsStackSlot()) {
2300 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002301 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002302 __ movl(temp, Address(temp, class_offset));
2303 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002304 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002305 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002306 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002307 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002308 // Instead of simply (possibly) unpoisoning `temp` here, we should
2309 // emit a read barrier for the previous class reference load.
2310 // However this is not required in practice, as this is an
2311 // intermediate/temporary reference and because the current
2312 // concurrent copying collector keeps the from-space memory
2313 // intact/accessible until the end of the marking phase (this
2314 // guarantee may not hold for future collectors).
Roland Levillain4d027112015-07-01 15:41:14 +01002315 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002316 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002317 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002318 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002319 __ call(Address(temp,
2320 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002321
2322 DCHECK(!codegen_->IsLeafMethod());
2323 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2324}
2325
Roland Levillain88cb1752014-10-20 16:36:47 +01002326void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2327 LocationSummary* locations =
2328 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2329 switch (neg->GetResultType()) {
2330 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002331 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002332 locations->SetInAt(0, Location::RequiresRegister());
2333 locations->SetOut(Location::SameAsFirstInput());
2334 break;
2335
Roland Levillain88cb1752014-10-20 16:36:47 +01002336 case Primitive::kPrimFloat:
2337 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002338 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002339 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002340 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002341 break;
2342
2343 default:
2344 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2345 }
2346}
2347
2348void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2349 LocationSummary* locations = neg->GetLocations();
2350 Location out = locations->Out();
2351 Location in = locations->InAt(0);
2352 switch (neg->GetResultType()) {
2353 case Primitive::kPrimInt:
2354 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002355 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002356 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002357 break;
2358
2359 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002360 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002361 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002362 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002363 break;
2364
Roland Levillain5368c212014-11-27 15:03:41 +00002365 case Primitive::kPrimFloat: {
2366 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002367 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002368 // Implement float negation with an exclusive or with value
2369 // 0x80000000 (mask for bit 31, representing the sign of a
2370 // single-precision floating-point number).
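// For example, 2.0f (0x40000000) ^ 0x80000000 yields -2.0f (0xC0000000); only the
// sign bit changes, so zeroes and NaNs are negated correctly as well.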
Mark Mendell40741f32015-04-20 22:10:34 -04002371 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002372 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002373 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002374 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002375
Roland Levillain5368c212014-11-27 15:03:41 +00002376 case Primitive::kPrimDouble: {
2377 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002378 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002379 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002380 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002381 // a double-precision floating-point number).
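// For example, 1.0 (0x3FF0000000000000) ^ 0x8000000000000000 yields -1.0
// (0xBFF0000000000000).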
Mark Mendell40741f32015-04-20 22:10:34 -04002382 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002383 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002384 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002385 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002386
2387 default:
2388 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2389 }
2390}
2391
Roland Levillaindff1f282014-11-05 14:15:05 +00002392void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2393 LocationSummary* locations =
2394 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2395 Primitive::Type result_type = conversion->GetResultType();
2396 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002397 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002398
David Brazdilb2bd1c52015-03-25 11:17:37 +00002399 // The Java language does not allow treating boolean as an integral type but
2400 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002401
Roland Levillaindff1f282014-11-05 14:15:05 +00002402 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002403 case Primitive::kPrimByte:
2404 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002405 case Primitive::kPrimLong:
2406 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002407 case Primitive::kPrimBoolean:
2408 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002409 case Primitive::kPrimShort:
2410 case Primitive::kPrimInt:
2411 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002412 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002413 locations->SetInAt(0, Location::Any());
2414 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2415 break;
2416
2417 default:
2418 LOG(FATAL) << "Unexpected type conversion from " << input_type
2419 << " to " << result_type;
2420 }
2421 break;
2422
Roland Levillain01a8d712014-11-14 16:27:39 +00002423 case Primitive::kPrimShort:
2424 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002425 case Primitive::kPrimLong:
2426 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002427 case Primitive::kPrimBoolean:
2428 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002429 case Primitive::kPrimByte:
2430 case Primitive::kPrimInt:
2431 case Primitive::kPrimChar:
2432 // Processing a Dex `int-to-short' instruction.
2433 locations->SetInAt(0, Location::Any());
2434 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2435 break;
2436
2437 default:
2438 LOG(FATAL) << "Unexpected type conversion from " << input_type
2439 << " to " << result_type;
2440 }
2441 break;
2442
Roland Levillain946e1432014-11-11 17:35:19 +00002443 case Primitive::kPrimInt:
2444 switch (input_type) {
2445 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002446 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002447 locations->SetInAt(0, Location::Any());
2448 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2449 break;
2450
2451 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002452 // Processing a Dex `float-to-int' instruction.
2453 locations->SetInAt(0, Location::RequiresFpuRegister());
2454 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002455 break;
2456
Roland Levillain946e1432014-11-11 17:35:19 +00002457 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002458 // Processing a Dex `double-to-int' instruction.
2459 locations->SetInAt(0, Location::RequiresFpuRegister());
2460 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002461 break;
2462
2463 default:
2464 LOG(FATAL) << "Unexpected type conversion from " << input_type
2465 << " to " << result_type;
2466 }
2467 break;
2468
Roland Levillaindff1f282014-11-05 14:15:05 +00002469 case Primitive::kPrimLong:
2470 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002471 case Primitive::kPrimBoolean:
2472 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002473 case Primitive::kPrimByte:
2474 case Primitive::kPrimShort:
2475 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002476 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002477 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002478 // TODO: We would benefit from a (to-be-implemented)
2479 // Location::RegisterOrStackSlot requirement for this input.
2480 locations->SetInAt(0, Location::RequiresRegister());
2481 locations->SetOut(Location::RequiresRegister());
2482 break;
2483
2484 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002485 // Processing a Dex `float-to-long' instruction.
2486 locations->SetInAt(0, Location::RequiresFpuRegister());
2487 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002488 break;
2489
Roland Levillaindff1f282014-11-05 14:15:05 +00002490 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002491 // Processing a Dex `double-to-long' instruction.
2492 locations->SetInAt(0, Location::RequiresFpuRegister());
2493 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002494 break;
2495
2496 default:
2497 LOG(FATAL) << "Unexpected type conversion from " << input_type
2498 << " to " << result_type;
2499 }
2500 break;
2501
Roland Levillain981e4542014-11-14 11:47:14 +00002502 case Primitive::kPrimChar:
2503 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002504 case Primitive::kPrimLong:
2505 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002506 case Primitive::kPrimBoolean:
2507 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002508 case Primitive::kPrimByte:
2509 case Primitive::kPrimShort:
2510 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002511 // Processing a Dex `int-to-char' instruction.
2512 locations->SetInAt(0, Location::Any());
2513 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2514 break;
2515
2516 default:
2517 LOG(FATAL) << "Unexpected type conversion from " << input_type
2518 << " to " << result_type;
2519 }
2520 break;
2521
Roland Levillaindff1f282014-11-05 14:15:05 +00002522 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002523 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002524 case Primitive::kPrimBoolean:
2525 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002526 case Primitive::kPrimByte:
2527 case Primitive::kPrimShort:
2528 case Primitive::kPrimInt:
2529 case Primitive::kPrimChar:
2530 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002531 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002532 locations->SetOut(Location::RequiresFpuRegister());
2533 break;
2534
2535 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002536 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002537 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002538 locations->SetOut(Location::RequiresFpuRegister());
2539 break;
2540
Roland Levillaincff13742014-11-17 14:32:17 +00002541 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002542 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002543 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002544 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002545 break;
2546
2547 default:
2548 LOG(FATAL) << "Unexpected type conversion from " << input_type
2549 << " to " << result_type;
2550 }
2551 break;
2552
Roland Levillaindff1f282014-11-05 14:15:05 +00002553 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002554 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002555 case Primitive::kPrimBoolean:
2556 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002557 case Primitive::kPrimByte:
2558 case Primitive::kPrimShort:
2559 case Primitive::kPrimInt:
2560 case Primitive::kPrimChar:
2561 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002562 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002563 locations->SetOut(Location::RequiresFpuRegister());
2564 break;
2565
2566 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002567 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002568 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002569 locations->SetOut(Location::RequiresFpuRegister());
2570 break;
2571
Roland Levillaincff13742014-11-17 14:32:17 +00002572 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002573 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002574 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002575 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002576 break;
2577
2578 default:
2579 LOG(FATAL) << "Unexpected type conversion from " << input_type
2580 << " to " << result_type;
2581 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002582 break;
2583
2584 default:
2585 LOG(FATAL) << "Unexpected type conversion from " << input_type
2586 << " to " << result_type;
2587 }
2588}
2589
2590void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2591 LocationSummary* locations = conversion->GetLocations();
2592 Location out = locations->Out();
2593 Location in = locations->InAt(0);
2594 Primitive::Type result_type = conversion->GetResultType();
2595 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002596 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002597 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002598 case Primitive::kPrimByte:
2599 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002600 case Primitive::kPrimLong:
2601 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002602 case Primitive::kPrimBoolean:
2603 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002604 case Primitive::kPrimShort:
2605 case Primitive::kPrimInt:
2606 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002607 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002608 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002609 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002610 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002611 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002612 Address(CpuRegister(RSP), in.GetStackIndex()));
2613 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002614 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002615 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002616 }
2617 break;
2618
2619 default:
2620 LOG(FATAL) << "Unexpected type conversion from " << input_type
2621 << " to " << result_type;
2622 }
2623 break;
2624
Roland Levillain01a8d712014-11-14 16:27:39 +00002625 case Primitive::kPrimShort:
2626 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002627 case Primitive::kPrimLong:
2628 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002629 case Primitive::kPrimBoolean:
2630 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002631 case Primitive::kPrimByte:
2632 case Primitive::kPrimInt:
2633 case Primitive::kPrimChar:
2634 // Processing a Dex `int-to-short' instruction.
2635 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002636 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002637 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002638 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002639 Address(CpuRegister(RSP), in.GetStackIndex()));
2640 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002641 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002642 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002643 }
2644 break;
2645
2646 default:
2647 LOG(FATAL) << "Unexpected type conversion from " << input_type
2648 << " to " << result_type;
2649 }
2650 break;
2651
Roland Levillain946e1432014-11-11 17:35:19 +00002652 case Primitive::kPrimInt:
2653 switch (input_type) {
2654 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002655 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002656 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002657 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002658 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002659 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002660 Address(CpuRegister(RSP), in.GetStackIndex()));
2661 } else {
2662 DCHECK(in.IsConstant());
2663 DCHECK(in.GetConstant()->IsLongConstant());
2664 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002665 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002666 }
2667 break;
2668
Roland Levillain3f8f9362014-12-02 17:45:01 +00002669 case Primitive::kPrimFloat: {
2670 // Processing a Dex `float-to-int' instruction.
2671 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2672 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002673 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002674
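// This follows the Java narrowing rules: NaN converts to 0 and values at or above
// 2^31 clamp to kPrimIntMax; too-small inputs need no special case because
// cvttss2si returns the "integer indefinite" value 0x80000000, which is already
// the required int min value.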
2675 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002676 // if input >= (float)INT_MAX goto done
2677 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002678 __ j(kAboveEqual, &done);
2679 // if input == NaN goto nan
2680 __ j(kUnordered, &nan);
2681 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002682 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002683 __ jmp(&done);
2684 __ Bind(&nan);
2685 // output = 0
2686 __ xorl(output, output);
2687 __ Bind(&done);
2688 break;
2689 }
2690
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002691 case Primitive::kPrimDouble: {
2692 // Processing a Dex `double-to-int' instruction.
2693 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2694 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002695 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002696
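// Same strategy as the float-to-int case above: NaN converts to 0, large values
// clamp to kPrimIntMax, and cvttsd2si yields 0x80000000 for other out-of-range inputs.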
2697 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002698 // if input >= (double)INT_MAX goto done
2699 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002700 __ j(kAboveEqual, &done);
2701 // if input == NaN goto nan
2702 __ j(kUnordered, &nan);
2703 // output = double-to-int-truncate(input)
2704 __ cvttsd2si(output, input);
2705 __ jmp(&done);
2706 __ Bind(&nan);
2707 // output = 0
2708 __ xorl(output, output);
2709 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002710 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002711 }
Roland Levillain946e1432014-11-11 17:35:19 +00002712
2713 default:
2714 LOG(FATAL) << "Unexpected type conversion from " << input_type
2715 << " to " << result_type;
2716 }
2717 break;
2718
Roland Levillaindff1f282014-11-05 14:15:05 +00002719 case Primitive::kPrimLong:
2720 DCHECK(out.IsRegister());
2721 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002722 case Primitive::kPrimBoolean:
2723 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002724 case Primitive::kPrimByte:
2725 case Primitive::kPrimShort:
2726 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002727 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002728 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002729 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002730 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002731 break;
2732
Roland Levillain624279f2014-12-04 11:54:28 +00002733 case Primitive::kPrimFloat: {
2734 // Processing a Dex `float-to-long' instruction.
2735 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2736 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002737 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002738
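// Mirrors the float-to-int sequence above, but clamps to kPrimLongMax and relies
// on the 64-bit cvttss2si returning 0x8000000000000000 (the long min value) for
// other out-of-range inputs.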
Mark Mendell92e83bf2015-05-07 11:25:03 -04002739 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002740 // if input >= (float)LONG_MAX goto done
2741 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002742 __ j(kAboveEqual, &done);
2743 // if input == NaN goto nan
2744 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002745 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002746 __ cvttss2si(output, input, true);
2747 __ jmp(&done);
2748 __ Bind(&nan);
2749 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002750 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002751 __ Bind(&done);
2752 break;
2753 }
2754
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002755 case Primitive::kPrimDouble: {
2756 // Processing a Dex `double-to-long' instruction.
2757 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2758 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002759 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002760
Mark Mendell92e83bf2015-05-07 11:25:03 -04002761 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002762 // if input >= (double)LONG_MAX goto done
2763 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002764 __ j(kAboveEqual, &done);
2765 // if input == NaN goto nan
2766 __ j(kUnordered, &nan);
2767 // output = double-to-long-truncate(input)
2768 __ cvttsd2si(output, input, true);
2769 __ jmp(&done);
2770 __ Bind(&nan);
2771 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002772 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002773 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002774 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002775 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002776
2777 default:
2778 LOG(FATAL) << "Unexpected type conversion from " << input_type
2779 << " to " << result_type;
2780 }
2781 break;
2782
Roland Levillain981e4542014-11-14 11:47:14 +00002783 case Primitive::kPrimChar:
2784 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002785 case Primitive::kPrimLong:
2786 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002787 case Primitive::kPrimBoolean:
2788 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002789 case Primitive::kPrimByte:
2790 case Primitive::kPrimShort:
2791 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002792 // Processing a Dex `int-to-char' instruction.
2793 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002794 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002795 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002796 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002797 Address(CpuRegister(RSP), in.GetStackIndex()));
2798 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002799 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002800 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002801 }
2802 break;
2803
2804 default:
2805 LOG(FATAL) << "Unexpected type conversion from " << input_type
2806 << " to " << result_type;
2807 }
2808 break;
2809
Roland Levillaindff1f282014-11-05 14:15:05 +00002810 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002811 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002812 case Primitive::kPrimBoolean:
2813 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002814 case Primitive::kPrimByte:
2815 case Primitive::kPrimShort:
2816 case Primitive::kPrimInt:
2817 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002818 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002819 if (in.IsRegister()) {
2820 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2821 } else if (in.IsConstant()) {
2822 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2823 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002824 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002825 } else {
2826 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2827 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2828 }
Roland Levillaincff13742014-11-17 14:32:17 +00002829 break;
2830
2831 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002832 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002833 if (in.IsRegister()) {
2834 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2835 } else if (in.IsConstant()) {
2836 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2837 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002838 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002839 } else {
2840 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2841 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2842 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002843 break;
2844
Roland Levillaincff13742014-11-17 14:32:17 +00002845 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002846 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002847 if (in.IsFpuRegister()) {
2848 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2849 } else if (in.IsConstant()) {
2850 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2851 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002852 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002853 } else {
2854 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2855 Address(CpuRegister(RSP), in.GetStackIndex()));
2856 }
Roland Levillaincff13742014-11-17 14:32:17 +00002857 break;
2858
2859 default:
2860 LOG(FATAL) << "Unexpected type conversion from " << input_type
2861 << " to " << result_type;
2862 }
2863 break;
2864
Roland Levillaindff1f282014-11-05 14:15:05 +00002865 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002866 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002867 case Primitive::kPrimBoolean:
2868 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002869 case Primitive::kPrimByte:
2870 case Primitive::kPrimShort:
2871 case Primitive::kPrimInt:
2872 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002873 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002874 if (in.IsRegister()) {
2875 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2876 } else if (in.IsConstant()) {
2877 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2878 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002879 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002880 } else {
2881 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2882 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2883 }
Roland Levillaincff13742014-11-17 14:32:17 +00002884 break;
2885
2886 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002887 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002888 if (in.IsRegister()) {
2889 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2890 } else if (in.IsConstant()) {
2891 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2892 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002893 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002894 } else {
2895 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2896 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2897 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002898 break;
2899
Roland Levillaincff13742014-11-17 14:32:17 +00002900 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002901 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002902 if (in.IsFpuRegister()) {
2903 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2904 } else if (in.IsConstant()) {
2905 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2906 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002907 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002908 } else {
2909 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2910 Address(CpuRegister(RSP), in.GetStackIndex()));
2911 }
Roland Levillaincff13742014-11-17 14:32:17 +00002912 break;
2913
2914 default:
2915 LOG(FATAL) << "Unexpected type conversion from " << input_type
2916 << " to " << result_type;
2917 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002918 break;
2919
2920 default:
2921 LOG(FATAL) << "Unexpected type conversion from " << input_type
2922 << " to " << result_type;
2923 }
2924}
2925
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002926void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002927 LocationSummary* locations =
2928 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002929 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002930 case Primitive::kPrimInt: {
2931 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002932 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2933 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002934 break;
2935 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002936
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002937 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002938 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002939 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002940 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002941 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002942 break;
2943 }
2944
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002945 case Primitive::kPrimDouble:
2946 case Primitive::kPrimFloat: {
2947 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002948 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002949 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002950 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002951 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002952
2953 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002954 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002955 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002956}
2957
2958void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2959 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002960 Location first = locations->InAt(0);
2961 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002962 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002963
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002964 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002965 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002966 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002967 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2968 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002969 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2970 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002971 } else {
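// Neither input aliases the output register: use leal to add straight into the
// output and avoid a separate register-to-register move.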
2972 __ leal(out.AsRegister<CpuRegister>(), Address(
2973 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2974 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002975 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002976 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2977 __ addl(out.AsRegister<CpuRegister>(),
2978 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2979 } else {
2980 __ leal(out.AsRegister<CpuRegister>(), Address(
2981 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2982 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002983 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002984 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002985 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002986 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002987 break;
2988 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002989
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002990 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002991 if (second.IsRegister()) {
2992 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2993 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002994 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2995 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002996 } else {
2997 __ leaq(out.AsRegister<CpuRegister>(), Address(
2998 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2999 }
3000 } else {
3001 DCHECK(second.IsConstant());
3002 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3003 int32_t int32_value = Low32Bits(value);
3004 DCHECK_EQ(int32_value, value);
3005 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3006 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
3007 } else {
3008 __ leaq(out.AsRegister<CpuRegister>(), Address(
3009 first.AsRegister<CpuRegister>(), int32_value));
3010 }
3011 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003012 break;
3013 }
3014
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003015 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003016 if (second.IsFpuRegister()) {
3017 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3018 } else if (second.IsConstant()) {
3019 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003020 codegen_->LiteralFloatAddress(
3021 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003022 } else {
3023 DCHECK(second.IsStackSlot());
3024 __ addss(first.AsFpuRegister<XmmRegister>(),
3025 Address(CpuRegister(RSP), second.GetStackIndex()));
3026 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003027 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003028 }
3029
3030 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003031 if (second.IsFpuRegister()) {
3032 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3033 } else if (second.IsConstant()) {
3034 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003035 codegen_->LiteralDoubleAddress(
3036 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003037 } else {
3038 DCHECK(second.IsDoubleStackSlot());
3039 __ addsd(first.AsFpuRegister<XmmRegister>(),
3040 Address(CpuRegister(RSP), second.GetStackIndex()));
3041 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003042 break;
3043 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003044
3045 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003046 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003047 }
3048}
3049
3050void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003051 LocationSummary* locations =
3052 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003053 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003054 case Primitive::kPrimInt: {
3055 locations->SetInAt(0, Location::RequiresRegister());
3056 locations->SetInAt(1, Location::Any());
3057 locations->SetOut(Location::SameAsFirstInput());
3058 break;
3059 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003060 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003061 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003062 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003063 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003064 break;
3065 }
Calin Juravle11351682014-10-23 15:38:15 +01003066 case Primitive::kPrimFloat:
3067 case Primitive::kPrimDouble: {
3068 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003069 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003070 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003071 break;
Calin Juravle11351682014-10-23 15:38:15 +01003072 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003073 default:
Calin Juravle11351682014-10-23 15:38:15 +01003074 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003075 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003076}
3077
3078void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3079 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003080 Location first = locations->InAt(0);
3081 Location second = locations->InAt(1);
3082 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003083 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003084 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003085 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003086 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003087 } else if (second.IsConstant()) {
3088 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003089 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003090 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003091 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003092 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003093 break;
3094 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003095 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003096 if (second.IsConstant()) {
3097 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3098 DCHECK(IsInt<32>(value));
3099 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3100 } else {
3101 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3102 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003103 break;
3104 }
3105
Calin Juravle11351682014-10-23 15:38:15 +01003106 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003107 if (second.IsFpuRegister()) {
3108 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3109 } else if (second.IsConstant()) {
3110 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003111 codegen_->LiteralFloatAddress(
3112 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003113 } else {
3114 DCHECK(second.IsStackSlot());
3115 __ subss(first.AsFpuRegister<XmmRegister>(),
3116 Address(CpuRegister(RSP), second.GetStackIndex()));
3117 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003118 break;
Calin Juravle11351682014-10-23 15:38:15 +01003119 }
3120
3121 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003122 if (second.IsFpuRegister()) {
3123 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3124 } else if (second.IsConstant()) {
3125 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003126 codegen_->LiteralDoubleAddress(
3127 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003128 } else {
3129 DCHECK(second.IsDoubleStackSlot());
3130 __ subsd(first.AsFpuRegister<XmmRegister>(),
3131 Address(CpuRegister(RSP), second.GetStackIndex()));
3132 }
Calin Juravle11351682014-10-23 15:38:15 +01003133 break;
3134 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003135
3136 default:
Calin Juravle11351682014-10-23 15:38:15 +01003137 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003138 }
3139}
3140
Calin Juravle34bacdf2014-10-07 20:23:36 +01003141void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3142 LocationSummary* locations =
3143 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3144 switch (mul->GetResultType()) {
3145 case Primitive::kPrimInt: {
3146 locations->SetInAt(0, Location::RequiresRegister());
3147 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003148 if (mul->InputAt(1)->IsIntConstant()) {
3149 // Can use 3 operand multiply.
3150 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3151 } else {
3152 locations->SetOut(Location::SameAsFirstInput());
3153 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003154 break;
3155 }
3156 case Primitive::kPrimLong: {
3157 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003158 locations->SetInAt(1, Location::Any());
3159 if (mul->InputAt(1)->IsLongConstant() &&
3160 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003161 // Can use 3 operand multiply.
3162 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3163 } else {
3164 locations->SetOut(Location::SameAsFirstInput());
3165 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003166 break;
3167 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003168 case Primitive::kPrimFloat:
3169 case Primitive::kPrimDouble: {
3170 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003171 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003172 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003173 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003174 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003175
3176 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003177 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003178 }
3179}
3180
3181void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3182 LocationSummary* locations = mul->GetLocations();
3183 Location first = locations->InAt(0);
3184 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003185 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003186 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003187 case Primitive::kPrimInt:
3188 // The constant may have ended up in a register, so test explicitly to avoid
3189 // problems where the output may not be the same as the first operand.
3190 if (mul->InputAt(1)->IsIntConstant()) {
3191 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3192 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3193 } else if (second.IsRegister()) {
3194 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003195 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003196 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003197 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003198 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003199 __ imull(first.AsRegister<CpuRegister>(),
3200 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003201 }
3202 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003203 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003204 // The constant may have ended up in a register, so test explicitly to avoid
3205 // problems where the output may not be the same as the first operand.
3206 if (mul->InputAt(1)->IsLongConstant()) {
3207 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3208 if (IsInt<32>(value)) {
3209 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3210 Immediate(static_cast<int32_t>(value)));
3211 } else {
3212 // Have to use the constant area.
3213 DCHECK(first.Equals(out));
3214 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3215 }
3216 } else if (second.IsRegister()) {
3217 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003218 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003219 } else {
3220 DCHECK(second.IsDoubleStackSlot());
3221 DCHECK(first.Equals(out));
3222 __ imulq(first.AsRegister<CpuRegister>(),
3223 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003224 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003225 break;
3226 }
3227
Calin Juravleb5bfa962014-10-21 18:02:24 +01003228 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003229 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003230 if (second.IsFpuRegister()) {
3231 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3232 } else if (second.IsConstant()) {
3233 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003234 codegen_->LiteralFloatAddress(
3235 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003236 } else {
3237 DCHECK(second.IsStackSlot());
3238 __ mulss(first.AsFpuRegister<XmmRegister>(),
3239 Address(CpuRegister(RSP), second.GetStackIndex()));
3240 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003241 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003242 }
3243
3244 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003245 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003246 if (second.IsFpuRegister()) {
3247 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3248 } else if (second.IsConstant()) {
3249 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003250 codegen_->LiteralDoubleAddress(
3251 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003252 } else {
3253 DCHECK(second.IsDoubleStackSlot());
3254 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3255 Address(CpuRegister(RSP), second.GetStackIndex()));
3256 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003257 break;
3258 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003259
3260 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003261 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003262 }
3263}
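// Illustrative note (not generated code): when the right-hand side is a constant, the
// three-operand form of imul lets the result land in a register other than the first
// input, roughly
//   imull out, first, imm    // out = first * imm, 'out' need not equal 'first'
// which is why the constant case above asks for kNoOutputOverlap, while the two-operand
// register/memory forms behave like 'first *= second' and therefore DCHECK(first.Equals(out)).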
3264
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003265void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3266 uint32_t stack_adjustment, bool is_float) {
3267 if (source.IsStackSlot()) {
3268 DCHECK(is_float);
3269 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3270 } else if (source.IsDoubleStackSlot()) {
3271 DCHECK(!is_float);
3272 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3273 } else {
3274 // Write the value to the temporary location on the stack and load to FP stack.
3275    // Write the value to the temporary location on the stack and load it onto the FP stack.
3276 Location stack_temp = Location::StackSlot(temp_offset);
3277 codegen_->Move(stack_temp, source);
3278 __ flds(Address(CpuRegister(RSP), temp_offset));
3279 } else {
3280 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3281 codegen_->Move(stack_temp, source);
3282 __ fldl(Address(CpuRegister(RSP), temp_offset));
3283 }
3284 }
3285}
3286
3287void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3288 Primitive::Type type = rem->GetResultType();
3289 bool is_float = type == Primitive::kPrimFloat;
3290 size_t elem_size = Primitive::ComponentSize(type);
3291 LocationSummary* locations = rem->GetLocations();
3292 Location first = locations->InAt(0);
3293 Location second = locations->InAt(1);
3294 Location out = locations->Out();
3295
3296 // Create stack space for 2 elements.
3297 // TODO: enhance register allocator to ask for stack temporaries.
3298 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3299
3300 // Load the values to the FP stack in reverse order, using temporaries if needed.
3301 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3302 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3303
3304 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003305 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003306 __ Bind(&retry);
3307 __ fprem();
3308
3309 // Move FP status to AX.
3310 __ fstsw();
3311
3312 // And see if the argument reduction is complete. This is signaled by the
3313 // C2 FPU flag bit set to 0.
3314 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3315 __ j(kNotEqual, &retry);
3316
3317 // We have settled on the final value. Retrieve it into an XMM register.
3318 // Store FP top of stack to real stack.
3319 if (is_float) {
3320 __ fsts(Address(CpuRegister(RSP), 0));
3321 } else {
3322 __ fstl(Address(CpuRegister(RSP), 0));
3323 }
3324
3325 // Pop the 2 items from the FP stack.
3326 __ fucompp();
3327
3328 // Load the value from the stack into an XMM register.
3329 DCHECK(out.IsFpuRegister()) << out;
3330 if (is_float) {
3331 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3332 } else {
3333 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3334 }
3335
3336 // And remove the temporary stack space we allocated.
3337 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3338}
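// Rough sketch of what GenerateRemFP computes (illustrative only, assuming the usual x87
// semantics: fprem produces a partial remainder and the C2 status bit stays set until the
// reduction is complete):
//   push the divisor, then the dividend, onto the x87 stack;
//   do { st0 = fprem(st0, st1); } while (C2 != 0);
//   result = st0;   // keeps the sign of the dividend, matching Java's '%' for float/double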
3339
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003340void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3341 DCHECK(instruction->IsDiv() || instruction->IsRem());
3342
3343 LocationSummary* locations = instruction->GetLocations();
3344 Location second = locations->InAt(1);
3345 DCHECK(second.IsConstant());
3346
3347 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3348 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003349 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003350
3351 DCHECK(imm == 1 || imm == -1);
3352
3353 switch (instruction->GetResultType()) {
3354 case Primitive::kPrimInt: {
3355 if (instruction->IsRem()) {
3356 __ xorl(output_register, output_register);
3357 } else {
3358 __ movl(output_register, input_register);
3359 if (imm == -1) {
3360 __ negl(output_register);
3361 }
3362 }
3363 break;
3364 }
3365
3366 case Primitive::kPrimLong: {
3367 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003368 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003369 } else {
3370 __ movq(output_register, input_register);
3371 if (imm == -1) {
3372 __ negq(output_register);
3373 }
3374 }
3375 break;
3376 }
3377
3378 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003379 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003380 }
3381}
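// Note (informational): for a divisor of +1 or -1 the quotient is just the numerator or its
// negation, and the remainder is always 0, hence the xor of the output register in the rem
// cases above.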
3382
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003383void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003384 LocationSummary* locations = instruction->GetLocations();
3385 Location second = locations->InAt(1);
3386
3387 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3388 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3389
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003390 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003391 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3392 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003393
3394 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3395
3396 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003397 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003398 __ testl(numerator, numerator);
3399 __ cmov(kGreaterEqual, tmp, numerator);
3400 int shift = CTZ(imm);
3401 __ sarl(tmp, Immediate(shift));
3402
3403 if (imm < 0) {
3404 __ negl(tmp);
3405 }
3406
3407 __ movl(output_register, tmp);
3408 } else {
3409 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3410 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3411
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003412 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003413 __ addq(rdx, numerator);
3414 __ testq(numerator, numerator);
3415 __ cmov(kGreaterEqual, rdx, numerator);
3416 int shift = CTZ(imm);
3417 __ sarq(rdx, Immediate(shift));
3418
3419 if (imm < 0) {
3420 __ negq(rdx);
3421 }
3422
3423 __ movq(output_register, rdx);
3424 }
3425}
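// Worked example (illustrative): for imm == 8 we have abs_imm == 8 and shift == 3, so the
// sequence above computes
//   tmp = (numerator >= 0) ? numerator : numerator + 7;   // leal + testl + cmov
//   tmp >>= 3;                                            // arithmetic shift
// e.g.  9 / 8  ->  9 >> 3  ==  1,  and  -9 / 8  ->  (-9 + 7) >> 3  ==  -1.
// The (abs_imm - 1) bias on negative inputs makes the shift round toward zero, as Java
// division requires; a trailing neg handles negative divisors.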
3426
3427void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3428 DCHECK(instruction->IsDiv() || instruction->IsRem());
3429
3430 LocationSummary* locations = instruction->GetLocations();
3431 Location second = locations->InAt(1);
3432
3433 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3434 : locations->GetTemp(0).AsRegister<CpuRegister>();
3435 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3436 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3437 : locations->Out().AsRegister<CpuRegister>();
3438 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3439
3440 DCHECK_EQ(RAX, eax.AsRegister());
3441 DCHECK_EQ(RDX, edx.AsRegister());
3442 if (instruction->IsDiv()) {
3443 DCHECK_EQ(RAX, out.AsRegister());
3444 } else {
3445 DCHECK_EQ(RDX, out.AsRegister());
3446 }
3447
3448 int64_t magic;
3449 int shift;
3450
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003451 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003452 if (instruction->GetResultType() == Primitive::kPrimInt) {
3453 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3454
3455 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3456
3457 __ movl(numerator, eax);
3458
Mark Mendell0c9497d2015-08-21 09:30:05 -04003459 NearLabel no_div;
3460 NearLabel end;
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003461 __ testl(eax, eax);
3462 __ j(kNotEqual, &no_div);
3463
3464 __ xorl(out, out);
3465 __ jmp(&end);
3466
3467 __ Bind(&no_div);
3468
3469 __ movl(eax, Immediate(magic));
3470 __ imull(numerator);
3471
3472 if (imm > 0 && magic < 0) {
3473 __ addl(edx, numerator);
3474 } else if (imm < 0 && magic > 0) {
3475 __ subl(edx, numerator);
3476 }
3477
3478 if (shift != 0) {
3479 __ sarl(edx, Immediate(shift));
3480 }
3481
3482 __ movl(eax, edx);
3483 __ shrl(edx, Immediate(31));
3484 __ addl(edx, eax);
3485
3486 if (instruction->IsRem()) {
3487 __ movl(eax, numerator);
3488 __ imull(edx, Immediate(imm));
3489 __ subl(eax, edx);
3490 __ movl(edx, eax);
3491 } else {
3492 __ movl(eax, edx);
3493 }
3494 __ Bind(&end);
3495 } else {
3496 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3497
3498 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3499
3500 CpuRegister rax = eax;
3501 CpuRegister rdx = edx;
3502
3503 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3504
3505 // Save the numerator.
3506 __ movq(numerator, rax);
3507
3508 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003509 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003510
3511 // RDX:RAX = magic * numerator
3512 __ imulq(numerator);
3513
3514 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003515 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003516 __ addq(rdx, numerator);
3517 } else if (imm < 0 && magic > 0) {
3518 // RDX -= numerator
3519 __ subq(rdx, numerator);
3520 }
3521
3522 // Shift if needed.
3523 if (shift != 0) {
3524 __ sarq(rdx, Immediate(shift));
3525 }
3526
3527 // RDX += 1 if RDX < 0
3528 __ movq(rax, rdx);
3529 __ shrq(rdx, Immediate(63));
3530 __ addq(rdx, rax);
3531
3532 if (instruction->IsRem()) {
3533 __ movq(rax, numerator);
3534
3535 if (IsInt<32>(imm)) {
3536 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3537 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003538 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003539 }
3540
3541 __ subq(rax, rdx);
3542 __ movq(rdx, rax);
3543 } else {
3544 __ movq(rax, rdx);
3545 }
3546 }
3547}
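// Illustrative sketch of the magic-number scheme above. The constants come from
// CalculateMagicAndShiftForDivRem (Hacker's Delight style); for example, for a 32-bit
// division by 7 that routine is expected to produce magic == 0x92492493 and shift == 2, so
//   q = hi32(magic * n);      // imull leaves the high half in edx
//   q += n;                   // added because magic < 0 while imm > 0
//   q >>= 2;                  // sarl by 'shift'
//   q += (unsigned)q >> 31;   // round the quotient toward zero
// e.g. n == 100: hi32(...) == -43, and (-43 + 100) >> 2 == 14 == 100 / 7. For a remainder
// the code then computes n - q * imm.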
3548
Calin Juravlebacfec32014-11-14 15:54:36 +00003549void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3550 DCHECK(instruction->IsDiv() || instruction->IsRem());
3551 Primitive::Type type = instruction->GetResultType();
3552  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3553
3554 bool is_div = instruction->IsDiv();
3555 LocationSummary* locations = instruction->GetLocations();
3556
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003557 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3558 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003559
Roland Levillain271ab9c2014-11-27 15:23:57 +00003560 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003561 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003562
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003563 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003564 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003565
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003566 if (imm == 0) {
3567      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3568 } else if (imm == 1 || imm == -1) {
3569 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003570 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003571 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003572 } else {
3573 DCHECK(imm <= -2 || imm >= 2);
3574 GenerateDivRemWithAnyConstant(instruction);
3575 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003576 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003577 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003578 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003579 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003580 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003581
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003582 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3583 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3584    // Dividing by -1 is actually negation, and -0x80000000(00000000) = 0x80000000(00000000),
3585 // so it's safe to just use negl instead of more complex comparisons.
3586 if (type == Primitive::kPrimInt) {
3587 __ cmpl(second_reg, Immediate(-1));
3588 __ j(kEqual, slow_path->GetEntryLabel());
3589      // edx:eax <- sign extension of eax
3590 __ cdq();
3591 // eax = quotient, edx = remainder
3592 __ idivl(second_reg);
3593 } else {
3594 __ cmpq(second_reg, Immediate(-1));
3595 __ j(kEqual, slow_path->GetEntryLabel());
3596      // rdx:rax <- sign extension of rax
3597 __ cqo();
3598 // rax = quotient, rdx = remainder
3599 __ idivq(second_reg);
3600 }
3601 __ Bind(slow_path->GetExitLabel());
3602 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003603}
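// Summary of the dispatch above (informational):
//   imm == 0        -> emit nothing; the preceding HDivZeroCheck throws before this point,
//   imm == +1 / -1  -> DivRemOneOrMinusOne (copy or negate, remainder 0),
//   |imm| == 2^k    -> DivByPowerOfTwo (div only: biased arithmetic shift),
//   other constants -> GenerateDivRemWithAnyConstant (magic-number multiply),
//   non-constants   -> cdq/cqo + idiv, with a slow path taken when the divisor is -1
//                      (which also covers the INT_MIN / -1 overflow).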
3604
Calin Juravle7c4954d2014-10-28 16:57:40 +00003605void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3606 LocationSummary* locations =
3607 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3608 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003609 case Primitive::kPrimInt:
3610 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003611 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003612 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003613 locations->SetOut(Location::SameAsFirstInput());
3614 // Intel uses edx:eax as the dividend.
3615 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003616 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3617 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3618 // output and request another temp.
3619 if (div->InputAt(1)->IsConstant()) {
3620 locations->AddTemp(Location::RequiresRegister());
3621 }
Calin Juravled0d48522014-11-04 16:40:20 +00003622 break;
3623 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003624
Calin Juravle7c4954d2014-10-28 16:57:40 +00003625 case Primitive::kPrimFloat:
3626 case Primitive::kPrimDouble: {
3627 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003628 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003629 locations->SetOut(Location::SameAsFirstInput());
3630 break;
3631 }
3632
3633 default:
3634 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3635 }
3636}
3637
3638void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3639 LocationSummary* locations = div->GetLocations();
3640 Location first = locations->InAt(0);
3641 Location second = locations->InAt(1);
3642 DCHECK(first.Equals(locations->Out()));
3643
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003644 Primitive::Type type = div->GetResultType();
3645 switch (type) {
3646 case Primitive::kPrimInt:
3647 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003648 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003649 break;
3650 }
3651
Calin Juravle7c4954d2014-10-28 16:57:40 +00003652 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003653 if (second.IsFpuRegister()) {
3654 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3655 } else if (second.IsConstant()) {
3656 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003657 codegen_->LiteralFloatAddress(
3658 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003659 } else {
3660 DCHECK(second.IsStackSlot());
3661 __ divss(first.AsFpuRegister<XmmRegister>(),
3662 Address(CpuRegister(RSP), second.GetStackIndex()));
3663 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003664 break;
3665 }
3666
3667 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003668 if (second.IsFpuRegister()) {
3669 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3670 } else if (second.IsConstant()) {
3671 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003672 codegen_->LiteralDoubleAddress(
3673 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003674 } else {
3675 DCHECK(second.IsDoubleStackSlot());
3676 __ divsd(first.AsFpuRegister<XmmRegister>(),
3677 Address(CpuRegister(RSP), second.GetStackIndex()));
3678 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003679 break;
3680 }
3681
3682 default:
3683 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3684 }
3685}
3686
Calin Juravlebacfec32014-11-14 15:54:36 +00003687void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003688 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003689 LocationSummary* locations =
3690 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003691
3692 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003693 case Primitive::kPrimInt:
3694 case Primitive::kPrimLong: {
3695 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003696 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003697      // Intel uses rdx:rax as the dividend and puts the remainder in rdx.
3698 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003699 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3700 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3701 // output and request another temp.
3702 if (rem->InputAt(1)->IsConstant()) {
3703 locations->AddTemp(Location::RequiresRegister());
3704 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003705 break;
3706 }
3707
3708 case Primitive::kPrimFloat:
3709 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003710 locations->SetInAt(0, Location::Any());
3711 locations->SetInAt(1, Location::Any());
3712 locations->SetOut(Location::RequiresFpuRegister());
3713 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003714 break;
3715 }
3716
3717 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003718 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003719 }
3720}
3721
3722void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3723 Primitive::Type type = rem->GetResultType();
3724 switch (type) {
3725 case Primitive::kPrimInt:
3726 case Primitive::kPrimLong: {
3727 GenerateDivRemIntegral(rem);
3728 break;
3729 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003730 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003731 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003732 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003733 break;
3734 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003735 default:
3736 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3737 }
3738}
3739
Calin Juravled0d48522014-11-04 16:40:20 +00003740void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003741 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3742 ? LocationSummary::kCallOnSlowPath
3743 : LocationSummary::kNoCall;
3744 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003745 locations->SetInAt(0, Location::Any());
3746 if (instruction->HasUses()) {
3747 locations->SetOut(Location::SameAsFirstInput());
3748 }
3749}
3750
3751void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003752 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003753 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3754 codegen_->AddSlowPath(slow_path);
3755
3756 LocationSummary* locations = instruction->GetLocations();
3757 Location value = locations->InAt(0);
3758
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003759 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003760 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003761 case Primitive::kPrimByte:
3762 case Primitive::kPrimChar:
3763 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003764 case Primitive::kPrimInt: {
3765 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003766 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003767 __ j(kEqual, slow_path->GetEntryLabel());
3768 } else if (value.IsStackSlot()) {
3769 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3770 __ j(kEqual, slow_path->GetEntryLabel());
3771 } else {
3772 DCHECK(value.IsConstant()) << value;
3773 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3774 __ jmp(slow_path->GetEntryLabel());
3775 }
3776 }
3777 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003778 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003779 case Primitive::kPrimLong: {
3780 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003781 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003782 __ j(kEqual, slow_path->GetEntryLabel());
3783 } else if (value.IsDoubleStackSlot()) {
3784 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3785 __ j(kEqual, slow_path->GetEntryLabel());
3786 } else {
3787 DCHECK(value.IsConstant()) << value;
3788 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3789 __ jmp(slow_path->GetEntryLabel());
3790 }
3791 }
3792 break;
3793 }
3794 default:
3795 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003796 }
Calin Juravled0d48522014-11-04 16:40:20 +00003797}
3798
Calin Juravle9aec02f2014-11-18 23:06:35 +00003799void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3800 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3801
3802 LocationSummary* locations =
3803 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3804
3805 switch (op->GetResultType()) {
3806 case Primitive::kPrimInt:
3807 case Primitive::kPrimLong: {
3808 locations->SetInAt(0, Location::RequiresRegister());
3809 // The shift count needs to be in CL.
3810 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3811 locations->SetOut(Location::SameAsFirstInput());
3812 break;
3813 }
3814 default:
3815 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3816 }
3817}
3818
3819void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3820 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3821
3822 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003823 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003824 Location second = locations->InAt(1);
3825
3826 switch (op->GetResultType()) {
3827 case Primitive::kPrimInt: {
3828 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003829 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003830 if (op->IsShl()) {
3831 __ shll(first_reg, second_reg);
3832 } else if (op->IsShr()) {
3833 __ sarl(first_reg, second_reg);
3834 } else {
3835 __ shrl(first_reg, second_reg);
3836 }
3837 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003838 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003839 if (op->IsShl()) {
3840 __ shll(first_reg, imm);
3841 } else if (op->IsShr()) {
3842 __ sarl(first_reg, imm);
3843 } else {
3844 __ shrl(first_reg, imm);
3845 }
3846 }
3847 break;
3848 }
3849 case Primitive::kPrimLong: {
3850 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003851 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003852 if (op->IsShl()) {
3853 __ shlq(first_reg, second_reg);
3854 } else if (op->IsShr()) {
3855 __ sarq(first_reg, second_reg);
3856 } else {
3857 __ shrq(first_reg, second_reg);
3858 }
3859 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003860 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003861 if (op->IsShl()) {
3862 __ shlq(first_reg, imm);
3863 } else if (op->IsShr()) {
3864 __ sarq(first_reg, imm);
3865 } else {
3866 __ shrq(first_reg, imm);
3867 }
3868 }
3869 break;
3870 }
3871 default:
3872 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003873 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003874 }
3875}
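// Note (informational): the '& kMaxIntShiftDistance' / '& kMaxLongShiftDistance' masking of
// constant shift counts above matches both the Java language rules and the hardware: only
// the low 5 bits of the count are used for int shifts and the low 6 bits for long shifts,
// so, for example, (x << 33) on an int behaves like (x << 1).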
3876
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003877void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3878 LocationSummary* locations =
3879 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3880
3881 switch (ror->GetResultType()) {
3882 case Primitive::kPrimInt:
3883 case Primitive::kPrimLong: {
3884 locations->SetInAt(0, Location::RequiresRegister());
3885 // The shift count needs to be in CL (unless it is a constant).
3886 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3887 locations->SetOut(Location::SameAsFirstInput());
3888 break;
3889 }
3890 default:
3891 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3892 UNREACHABLE();
3893 }
3894}
3895
3896void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3897 LocationSummary* locations = ror->GetLocations();
3898 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3899 Location second = locations->InAt(1);
3900
3901 switch (ror->GetResultType()) {
3902 case Primitive::kPrimInt:
3903 if (second.IsRegister()) {
3904 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3905 __ rorl(first_reg, second_reg);
3906 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003907 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003908 __ rorl(first_reg, imm);
3909 }
3910 break;
3911 case Primitive::kPrimLong:
3912 if (second.IsRegister()) {
3913 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3914 __ rorq(first_reg, second_reg);
3915 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003916 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003917 __ rorq(first_reg, imm);
3918 }
3919 break;
3920 default:
3921 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3922 UNREACHABLE();
3923 }
3924}
3925
Calin Juravle9aec02f2014-11-18 23:06:35 +00003926void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3927 HandleShift(shl);
3928}
3929
3930void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3931 HandleShift(shl);
3932}
3933
3934void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3935 HandleShift(shr);
3936}
3937
3938void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3939 HandleShift(shr);
3940}
3941
3942void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3943 HandleShift(ushr);
3944}
3945
3946void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3947 HandleShift(ushr);
3948}
3949
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003950void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003951 LocationSummary* locations =
3952 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003953 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003954 if (instruction->IsStringAlloc()) {
3955 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3956 } else {
3957 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3958 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3959 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003960 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003961}
3962
3963void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003964  // Note: if heap poisoning is enabled, the entry point takes care
3965 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003966 if (instruction->IsStringAlloc()) {
3967 // String is allocated through StringFactory. Call NewEmptyString entry point.
3968 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3969 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3970 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3971 __ call(Address(temp, code_offset.SizeValue()));
3972 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3973 } else {
3974 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3975 instruction,
3976 instruction->GetDexPc(),
3977 nullptr);
3978 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3979 DCHECK(!codegen_->IsLeafMethod());
3980 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003981}
3982
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003983void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3984 LocationSummary* locations =
3985 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3986 InvokeRuntimeCallingConvention calling_convention;
3987 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003988 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003989 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003990 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003991}
3992
3993void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3994 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003995 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3996 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003997  // Note: if heap poisoning is enabled, the entry point takes care
3998 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003999 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4000 instruction,
4001 instruction->GetDexPc(),
4002 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004003 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004004
4005 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004006}
4007
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004008void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004009 LocationSummary* locations =
4010 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004011 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4012 if (location.IsStackSlot()) {
4013 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4014 } else if (location.IsDoubleStackSlot()) {
4015 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4016 }
4017 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004018}
4019
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004020void InstructionCodeGeneratorX86_64::VisitParameterValue(
4021 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004022 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004023}
4024
4025void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4026 LocationSummary* locations =
4027 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4028 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4029}
4030
4031void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
4032 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4033 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004034}
4035
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004036void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4037 LocationSummary* locations =
4038 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4039 locations->SetInAt(0, Location::RequiresRegister());
4040 locations->SetOut(Location::RequiresRegister());
4041}
4042
4043void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4044 LocationSummary* locations = instruction->GetLocations();
4045 uint32_t method_offset = 0;
Vladimir Markoa1de9182016-02-25 11:37:38 +00004046 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004047 method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4048 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
4049 } else {
4050 method_offset = mirror::Class::EmbeddedImTableEntryOffset(
4051 instruction->GetIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
4052 }
4053 __ movq(locations->Out().AsRegister<CpuRegister>(),
4054 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
4055}
4056
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004057void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004058 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004059 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004060 locations->SetInAt(0, Location::RequiresRegister());
4061 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004062}
4063
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004064void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4065 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004066 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4067 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004068 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004069 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004070 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004071 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004072 break;
4073
4074 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004075 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004076 break;
4077
4078 default:
4079 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4080 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004081}
4082
David Brazdil66d126e2015-04-03 16:02:44 +01004083void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4084 LocationSummary* locations =
4085 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4086 locations->SetInAt(0, Location::RequiresRegister());
4087 locations->SetOut(Location::SameAsFirstInput());
4088}
4089
4090void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004091 LocationSummary* locations = bool_not->GetLocations();
4092 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4093 locations->Out().AsRegister<CpuRegister>().AsRegister());
4094 Location out = locations->Out();
4095 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4096}
4097
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004098void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004099 LocationSummary* locations =
4100 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004101 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
4102 locations->SetInAt(i, Location::Any());
4103 }
4104 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004105}
4106
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004107void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004108 LOG(FATAL) << "Unimplemented";
4109}
4110
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004111void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004112 /*
4113 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004114 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004115 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4116 */
4117 switch (kind) {
4118 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004119 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004120 break;
4121 }
4122 case MemBarrierKind::kAnyStore:
4123 case MemBarrierKind::kLoadAny:
4124 case MemBarrierKind::kStoreStore: {
4125 // nop
4126 break;
4127 }
4128 default:
4129      LOG(FATAL) << "Unexpected memory barrier " << kind;
4130 }
4131}
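// Note (informational): under the x86-64 TSO memory model only StoreLoad reordering is
// possible, which is why kAnyAny is the only kind that emits anything here; MemoryFence()
// is expected to lower to an mfence or an equivalent locked read-modify-write.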
4132
4133void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4134 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4135
Roland Levillain0d5a2812015-11-13 10:07:31 +00004136 bool object_field_get_with_read_barrier =
4137 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004138 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004139 new (GetGraph()->GetArena()) LocationSummary(instruction,
4140 object_field_get_with_read_barrier ?
4141 LocationSummary::kCallOnSlowPath :
4142 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004143 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004144 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4145 locations->SetOut(Location::RequiresFpuRegister());
4146 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004147 // The output overlaps for an object field get when read barriers
4148 // are enabled: we do not want the move to overwrite the object's
4149 // location, as we need it to emit the read barrier.
4150 locations->SetOut(
4151 Location::RequiresRegister(),
4152 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004153 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004154 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4155 // We need a temporary register for the read barrier marking slow
4156 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4157 locations->AddTemp(Location::RequiresRegister());
4158 }
Calin Juravle52c48962014-12-16 17:02:57 +00004159}
4160
4161void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4162 const FieldInfo& field_info) {
4163 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4164
4165 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004166 Location base_loc = locations->InAt(0);
4167 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004168 Location out = locations->Out();
4169 bool is_volatile = field_info.IsVolatile();
4170 Primitive::Type field_type = field_info.GetFieldType();
4171 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4172
4173 switch (field_type) {
4174 case Primitive::kPrimBoolean: {
4175 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4176 break;
4177 }
4178
4179 case Primitive::kPrimByte: {
4180 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4181 break;
4182 }
4183
4184 case Primitive::kPrimShort: {
4185 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4186 break;
4187 }
4188
4189 case Primitive::kPrimChar: {
4190 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4191 break;
4192 }
4193
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004194 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004195 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4196 break;
4197 }
4198
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004199 case Primitive::kPrimNot: {
4200 // /* HeapReference<Object> */ out = *(base + offset)
4201 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4202 Location temp_loc = locations->GetTemp(0);
4203 // Note that a potential implicit null check is handled in this
4204        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4205 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4206 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4207 if (is_volatile) {
4208 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4209 }
4210 } else {
4211 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4212 codegen_->MaybeRecordImplicitNullCheck(instruction);
4213 if (is_volatile) {
4214 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4215 }
4216 // If read barriers are enabled, emit read barriers other than
4217 // Baker's using a slow path (and also unpoison the loaded
4218 // reference, if heap poisoning is enabled).
4219 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4220 }
4221 break;
4222 }
4223
Calin Juravle52c48962014-12-16 17:02:57 +00004224 case Primitive::kPrimLong: {
4225 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4226 break;
4227 }
4228
4229 case Primitive::kPrimFloat: {
4230 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4231 break;
4232 }
4233
4234 case Primitive::kPrimDouble: {
4235 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4236 break;
4237 }
4238
4239 case Primitive::kPrimVoid:
4240 LOG(FATAL) << "Unreachable type " << field_type;
4241 UNREACHABLE();
4242 }
4243
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004244 if (field_type == Primitive::kPrimNot) {
4245 // Potential implicit null checks, in the case of reference
4246 // fields, are handled in the previous switch statement.
4247 } else {
4248 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004249 }
Roland Levillain4d027112015-07-01 15:41:14 +01004250
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004251 if (is_volatile) {
4252 if (field_type == Primitive::kPrimNot) {
4253 // Memory barriers, in the case of references, are also handled
4254 // in the previous switch statement.
4255 } else {
4256 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4257 }
Roland Levillain4d027112015-07-01 15:41:14 +01004258 }
Calin Juravle52c48962014-12-16 17:02:57 +00004259}
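// Note (informational): for a volatile field get the load is emitted first and the LoadAny
// barrier afterwards; on x86-64 that barrier is a no-op (see GenerateMemoryBarrier above),
// so it mainly documents the acquire ordering required by the Java memory model.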
4260
4261void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4262 const FieldInfo& field_info) {
4263 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4264
4265 LocationSummary* locations =
4266 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004267 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004268 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004269 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004270 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004271
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004272 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004273 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004274 if (is_volatile) {
4275 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4276 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4277 } else {
4278 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4279 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004280 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004281 if (is_volatile) {
4282 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4283 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4284 } else {
4285 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4286 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004287 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004288 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004289 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004290 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004291 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004292 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4293 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004294 locations->AddTemp(Location::RequiresRegister());
4295 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004296}
4297
Calin Juravle52c48962014-12-16 17:02:57 +00004298void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004299 const FieldInfo& field_info,
4300 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004301 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4302
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004303 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004304 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4305 Location value = locations->InAt(1);
4306 bool is_volatile = field_info.IsVolatile();
4307 Primitive::Type field_type = field_info.GetFieldType();
4308 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4309
4310 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004311 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004312 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004313
Mark Mendellea5af682015-10-22 17:35:49 -04004314 bool maybe_record_implicit_null_check_done = false;
4315
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004316 switch (field_type) {
4317 case Primitive::kPrimBoolean:
4318 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004319 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004320 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004321 __ movb(Address(base, offset), Immediate(v));
4322 } else {
4323 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4324 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004325 break;
4326 }
4327
4328 case Primitive::kPrimShort:
4329 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004330 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004331 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004332 __ movw(Address(base, offset), Immediate(v));
4333 } else {
4334 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4335 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004336 break;
4337 }
4338
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004339 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004340 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004341 if (value.IsConstant()) {
4342 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004343 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4344 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4345 // Note: if heap poisoning is enabled, no need to poison
4346 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004347 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004348 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004349 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4350 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4351 __ movl(temp, value.AsRegister<CpuRegister>());
4352 __ PoisonHeapReference(temp);
4353 __ movl(Address(base, offset), temp);
4354 } else {
4355 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4356 }
Mark Mendell40741f32015-04-20 22:10:34 -04004357 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004358 break;
4359 }
4360
4361 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004362 if (value.IsConstant()) {
4363 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004364 codegen_->MoveInt64ToAddress(Address(base, offset),
4365 Address(base, offset + sizeof(int32_t)),
4366 v,
4367 instruction);
4368 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004369 } else {
4370 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4371 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004372 break;
4373 }
4374
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004375 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004376 if (value.IsConstant()) {
4377 int32_t v =
4378 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4379 __ movl(Address(base, offset), Immediate(v));
4380 } else {
4381 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4382 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004383 break;
4384 }
4385
4386 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004387 if (value.IsConstant()) {
4388 int64_t v =
4389 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4390 codegen_->MoveInt64ToAddress(Address(base, offset),
4391 Address(base, offset + sizeof(int32_t)),
4392 v,
4393 instruction);
4394 maybe_record_implicit_null_check_done = true;
4395 } else {
4396 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4397 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004398 break;
4399 }
4400
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004401 case Primitive::kPrimVoid:
4402 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004403 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004404 }
Calin Juravle52c48962014-12-16 17:02:57 +00004405
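  // For the 64-bit constant stores above, MoveInt64ToAddress has already
  // recorded the implicit null check (it may split the store into two 32-bit
  // moves), so avoid recording it a second time here.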
Mark Mendellea5af682015-10-22 17:35:49 -04004406 if (!maybe_record_implicit_null_check_done) {
4407 codegen_->MaybeRecordImplicitNullCheck(instruction);
4408 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004409
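  // Storing a reference (other than a known null) needs a GC write barrier:
  // mark the card covering `base` so the collector knows this object may now
  // reference other objects.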
4410 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4411 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4412 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004413 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004414 }
4415
Calin Juravle52c48962014-12-16 17:02:57 +00004416 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004417 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004418 }
4419}
4420
4421void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4422 HandleFieldSet(instruction, instruction->GetFieldInfo());
4423}
4424
4425void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004426 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004427}
4428
4429void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004430 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004431}
4432
4433void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004434 HandleFieldGet(instruction, instruction->GetFieldInfo());
4435}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004436
Calin Juravle52c48962014-12-16 17:02:57 +00004437void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4438 HandleFieldGet(instruction);
4439}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004440
Calin Juravle52c48962014-12-16 17:02:57 +00004441void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4442 HandleFieldGet(instruction, instruction->GetFieldInfo());
4443}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004444
Calin Juravle52c48962014-12-16 17:02:57 +00004445void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4446 HandleFieldSet(instruction, instruction->GetFieldInfo());
4447}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004448
Calin Juravle52c48962014-12-16 17:02:57 +00004449void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004450 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004451}
4452
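// Unresolved field accesses cannot be compiled into direct memory operations;
// they are routed through the runtime field access entry points, using the
// field access calling convention to pass the object, field index and value.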
Calin Juravlee460d1d2015-09-29 04:52:17 +01004453void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4454 HUnresolvedInstanceFieldGet* instruction) {
4455 FieldAccessCallingConventionX86_64 calling_convention;
4456 codegen_->CreateUnresolvedFieldLocationSummary(
4457 instruction, instruction->GetFieldType(), calling_convention);
4458}
4459
4460void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4461 HUnresolvedInstanceFieldGet* instruction) {
4462 FieldAccessCallingConventionX86_64 calling_convention;
4463 codegen_->GenerateUnresolvedFieldAccess(instruction,
4464 instruction->GetFieldType(),
4465 instruction->GetFieldIndex(),
4466 instruction->GetDexPc(),
4467 calling_convention);
4468}
4469
4470void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4471 HUnresolvedInstanceFieldSet* instruction) {
4472 FieldAccessCallingConventionX86_64 calling_convention;
4473 codegen_->CreateUnresolvedFieldLocationSummary(
4474 instruction, instruction->GetFieldType(), calling_convention);
4475}
4476
4477void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4478 HUnresolvedInstanceFieldSet* instruction) {
4479 FieldAccessCallingConventionX86_64 calling_convention;
4480 codegen_->GenerateUnresolvedFieldAccess(instruction,
4481 instruction->GetFieldType(),
4482 instruction->GetFieldIndex(),
4483 instruction->GetDexPc(),
4484 calling_convention);
4485}
4486
4487void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4488 HUnresolvedStaticFieldGet* instruction) {
4489 FieldAccessCallingConventionX86_64 calling_convention;
4490 codegen_->CreateUnresolvedFieldLocationSummary(
4491 instruction, instruction->GetFieldType(), calling_convention);
4492}
4493
4494void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4495 HUnresolvedStaticFieldGet* instruction) {
4496 FieldAccessCallingConventionX86_64 calling_convention;
4497 codegen_->GenerateUnresolvedFieldAccess(instruction,
4498 instruction->GetFieldType(),
4499 instruction->GetFieldIndex(),
4500 instruction->GetDexPc(),
4501 calling_convention);
4502}
4503
4504void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4505 HUnresolvedStaticFieldSet* instruction) {
4506 FieldAccessCallingConventionX86_64 calling_convention;
4507 codegen_->CreateUnresolvedFieldLocationSummary(
4508 instruction, instruction->GetFieldType(), calling_convention);
4509}
4510
4511void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4512 HUnresolvedStaticFieldSet* instruction) {
4513 FieldAccessCallingConventionX86_64 calling_convention;
4514 codegen_->GenerateUnresolvedFieldAccess(instruction,
4515 instruction->GetFieldType(),
4516 instruction->GetFieldIndex(),
4517 instruction->GetDexPc(),
4518 calling_convention);
4519}
4520
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004521void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004522 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4523 ? LocationSummary::kCallOnSlowPath
4524 : LocationSummary::kNoCall;
4525 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4526 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004527 ? Location::RequiresRegister()
4528 : Location::Any();
4529 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004530 if (instruction->HasUses()) {
4531 locations->SetOut(Location::SameAsFirstInput());
4532 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004533}
4534
Calin Juravle2ae48182016-03-16 14:05:09 +00004535void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4536 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004537 return;
4538 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004539 LocationSummary* locations = instruction->GetLocations();
4540 Location obj = locations->InAt(0);
4541
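  // The testl below loads from the object's first bytes: if `obj` is null, the
  // access faults and the fault handler raises a NullPointerException. RAX is
  // an arbitrary register operand here; testl only sets flags.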
4542 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004543 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004544}
4545
Calin Juravle2ae48182016-03-16 14:05:09 +00004546void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004547 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004548 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004549
4550 LocationSummary* locations = instruction->GetLocations();
4551 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004552
4553 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004554 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004555 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004556 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004557 } else {
4558 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004559 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004560 __ jmp(slow_path->GetEntryLabel());
4561 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004562 }
4563 __ j(kEqual, slow_path->GetEntryLabel());
4564}
4565
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004566void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004567 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004568}
4569
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004570void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004571 bool object_array_get_with_read_barrier =
4572 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004573 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004574 new (GetGraph()->GetArena()) LocationSummary(instruction,
4575 object_array_get_with_read_barrier ?
4576 LocationSummary::kCallOnSlowPath :
4577 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004578 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004579 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004580 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4581 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4582 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004583 // The output overlaps for an object array get when read barriers
4584 // are enabled: we do not want the move to overwrite the array's
4585 // location, as we need it to emit the read barrier.
4586 locations->SetOut(
4587 Location::RequiresRegister(),
4588 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004589 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004590 // We need a temporary register for the read barrier marking slow
4591 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4592 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4593 locations->AddTemp(Location::RequiresRegister());
4594 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004595}
4596
4597void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4598 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004599 Location obj_loc = locations->InAt(0);
4600 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004601 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004602 Location out_loc = locations->Out();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004603
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004604 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004605 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004606 case Primitive::kPrimBoolean: {
4607 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004608 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004609 if (index.IsConstant()) {
4610 __ movzxb(out, Address(obj,
4611 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4612 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004613 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004614 }
4615 break;
4616 }
4617
4618 case Primitive::kPrimByte: {
4619 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004620 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004621 if (index.IsConstant()) {
4622 __ movsxb(out, Address(obj,
4623 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4624 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004625 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004626 }
4627 break;
4628 }
4629
4630 case Primitive::kPrimShort: {
4631 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004632 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004633 if (index.IsConstant()) {
4634 __ movsxw(out, Address(obj,
4635 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4636 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004637 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004638 }
4639 break;
4640 }
4641
4642 case Primitive::kPrimChar: {
4643 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004644 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004645 if (index.IsConstant()) {
4646 __ movzxw(out, Address(obj,
4647 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4648 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004649 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004650 }
4651 break;
4652 }
4653
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004654 case Primitive::kPrimInt: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004655 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004656 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004657 if (index.IsConstant()) {
4658 __ movl(out, Address(obj,
4659 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4660 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004661 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004662 }
4663 break;
4664 }
4665
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004666 case Primitive::kPrimNot: {
4667 static_assert(
4668 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4669 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4670 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4671 // /* HeapReference<Object> */ out =
4672 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4673 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4674 Location temp = locations->GetTemp(0);
4675 // Note that a potential implicit null check is handled in this
4676        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4677 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4678 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4679 } else {
4680 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4681 if (index.IsConstant()) {
4682 uint32_t offset =
4683 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4684 __ movl(out, Address(obj, offset));
4685 codegen_->MaybeRecordImplicitNullCheck(instruction);
4686 // If read barriers are enabled, emit read barriers other than
4687 // Baker's using a slow path (and also unpoison the loaded
4688 // reference, if heap poisoning is enabled).
4689 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4690 } else {
4691 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4692 codegen_->MaybeRecordImplicitNullCheck(instruction);
4693 // If read barriers are enabled, emit read barriers other than
4694 // Baker's using a slow path (and also unpoison the loaded
4695 // reference, if heap poisoning is enabled).
4696 codegen_->MaybeGenerateReadBarrierSlow(
4697 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4698 }
4699 }
4700 break;
4701 }
4702
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004703 case Primitive::kPrimLong: {
4704 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004705 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004706 if (index.IsConstant()) {
4707 __ movq(out, Address(obj,
4708 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4709 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004710 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004711 }
4712 break;
4713 }
4714
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004715 case Primitive::kPrimFloat: {
4716 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004717 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004718 if (index.IsConstant()) {
4719 __ movss(out, Address(obj,
4720 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4721 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004722 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004723 }
4724 break;
4725 }
4726
4727 case Primitive::kPrimDouble: {
4728 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004729 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004730 if (index.IsConstant()) {
4731 __ movsd(out, Address(obj,
4732 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4733 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004734 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004735 }
4736 break;
4737 }
4738
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004739 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004740 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004741 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004742 }
Roland Levillain4d027112015-07-01 15:41:14 +01004743
4744 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004745 // Potential implicit null checks, in the case of reference
4746 // arrays, are handled in the previous switch statement.
4747 } else {
4748 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004749 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004750}
4751
4752void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004753 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004754
4755 bool needs_write_barrier =
4756 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004757 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004758 bool object_array_set_with_read_barrier =
4759 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004760
Nicolas Geoffray39468442014-09-02 15:17:15 +01004761 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004762 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004763 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004764 LocationSummary::kCallOnSlowPath :
4765 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004766
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004767 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004768 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4769 if (Primitive::IsFloatingPointType(value_type)) {
4770 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004771 } else {
4772 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4773 }
4774
4775 if (needs_write_barrier) {
4776 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004777
4778 // This first temporary register is possibly used for heap
4779 // reference poisoning and/or read barrier emission too.
4780 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004781 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004782 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004783}
4784
4785void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4786 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004787 Location array_loc = locations->InAt(0);
4788 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004789 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004790 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004791 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004792 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004793 bool needs_write_barrier =
4794 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004795 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4796 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4797 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004798
4799 switch (value_type) {
4800 case Primitive::kPrimBoolean:
4801 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004802 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4803 Address address = index.IsConstant()
4804 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4805 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4806 if (value.IsRegister()) {
4807 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004808 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004809 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004810 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004811 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004812 break;
4813 }
4814
4815 case Primitive::kPrimShort:
4816 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004817 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4818 Address address = index.IsConstant()
4819 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4820 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4821 if (value.IsRegister()) {
4822 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004823 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004824 DCHECK(value.IsConstant()) << value;
4825 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004826 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004827 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004828 break;
4829 }
4830
4831 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004832 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4833 Address address = index.IsConstant()
4834 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4835 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004836
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004837 if (!value.IsRegister()) {
4838 // Just setting null.
4839 DCHECK(instruction->InputAt(2)->IsNullConstant());
4840 DCHECK(value.IsConstant()) << value;
4841 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004842 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004843 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004844 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004845 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004846 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004847
4848 DCHECK(needs_write_barrier);
4849 CpuRegister register_value = value.AsRegister<CpuRegister>();
4850 NearLabel done, not_null, do_put;
4851 SlowPathCode* slow_path = nullptr;
4852 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004853 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004854 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4855 codegen_->AddSlowPath(slow_path);
4856 if (instruction->GetValueCanBeNull()) {
4857 __ testl(register_value, register_value);
4858 __ j(kNotEqual, &not_null);
4859 __ movl(address, Immediate(0));
4860 codegen_->MaybeRecordImplicitNullCheck(instruction);
4861 __ jmp(&done);
4862 __ Bind(&not_null);
4863 }
4864
Roland Levillain0d5a2812015-11-13 10:07:31 +00004865 if (kEmitCompilerReadBarrier) {
4866 // When read barriers are enabled, the type checking
4867 // instrumentation requires two read barriers:
4868 //
4869 // __ movl(temp2, temp);
4870 // // /* HeapReference<Class> */ temp = temp->component_type_
4871 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004872 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004873 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4874 //
4875 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4876 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004877 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004878 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4879 //
4880 // __ cmpl(temp, temp2);
4881 //
4882 // However, the second read barrier may trash `temp`, as it
4883 // is a temporary register, and as such would not be saved
4884 // along with live registers before calling the runtime (nor
4885 // restored afterwards). So in this case, we bail out and
4886 // delegate the work to the array set slow path.
4887 //
4888 // TODO: Extend the register allocator to support a new
4889 // "(locally) live temp" location so as to avoid always
4890 // going into the slow path when read barriers are enabled.
4891 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004892 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004893 // /* HeapReference<Class> */ temp = array->klass_
4894 __ movl(temp, Address(array, class_offset));
4895 codegen_->MaybeRecordImplicitNullCheck(instruction);
4896 __ MaybeUnpoisonHeapReference(temp);
4897
4898 // /* HeapReference<Class> */ temp = temp->component_type_
4899 __ movl(temp, Address(temp, component_offset));
4900 // If heap poisoning is enabled, no need to unpoison `temp`
4901 // nor the object reference in `register_value->klass`, as
4902 // we are comparing two poisoned references.
4903 __ cmpl(temp, Address(register_value, class_offset));
4904
4905 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4906 __ j(kEqual, &do_put);
4907 // If heap poisoning is enabled, the `temp` reference has
4908 // not been unpoisoned yet; unpoison it now.
4909 __ MaybeUnpoisonHeapReference(temp);
4910
4911 // /* HeapReference<Class> */ temp = temp->super_class_
4912 __ movl(temp, Address(temp, super_offset));
4913 // If heap poisoning is enabled, no need to unpoison
4914 // `temp`, as we are comparing against null below.
4915 __ testl(temp, temp);
4916 __ j(kNotEqual, slow_path->GetEntryLabel());
4917 __ Bind(&do_put);
4918 } else {
4919 __ j(kNotEqual, slow_path->GetEntryLabel());
4920 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004921 }
4922 }
4923
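      // Store the reference into the array slot; with heap poisoning enabled,
      // the value is first poisoned (encoded) in a temp so the stored reference
      // is in the poisoned representation.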
4924 if (kPoisonHeapReferences) {
4925 __ movl(temp, register_value);
4926 __ PoisonHeapReference(temp);
4927 __ movl(address, temp);
4928 } else {
4929 __ movl(address, register_value);
4930 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004931 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004932 codegen_->MaybeRecordImplicitNullCheck(instruction);
4933 }
4934
4935 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4936 codegen_->MarkGCCard(
4937 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4938 __ Bind(&done);
4939
4940 if (slow_path != nullptr) {
4941 __ Bind(slow_path->GetExitLabel());
4942 }
4943
4944 break;
4945 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004946
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004947 case Primitive::kPrimInt: {
4948 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4949 Address address = index.IsConstant()
4950 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4951 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4952 if (value.IsRegister()) {
4953 __ movl(address, value.AsRegister<CpuRegister>());
4954 } else {
4955 DCHECK(value.IsConstant()) << value;
4956 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4957 __ movl(address, Immediate(v));
4958 }
4959 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004960 break;
4961 }
4962
4963 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004964 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4965 Address address = index.IsConstant()
4966 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4967 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4968 if (value.IsRegister()) {
4969 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004970 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004971 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004972 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004973 Address address_high = index.IsConstant()
4974 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4975 offset + sizeof(int32_t))
4976 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4977 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004978 }
4979 break;
4980 }
4981
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004982 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004983 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4984 Address address = index.IsConstant()
4985 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4986 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004987 if (value.IsFpuRegister()) {
4988 __ movss(address, value.AsFpuRegister<XmmRegister>());
4989 } else {
4990 DCHECK(value.IsConstant());
4991 int32_t v =
4992 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4993 __ movl(address, Immediate(v));
4994 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004995 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004996 break;
4997 }
4998
4999 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005000 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
5001 Address address = index.IsConstant()
5002 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
5003 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04005004 if (value.IsFpuRegister()) {
5005 __ movsd(address, value.AsFpuRegister<XmmRegister>());
5006 codegen_->MaybeRecordImplicitNullCheck(instruction);
5007 } else {
5008 int64_t v =
5009 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
5010 Address address_high = index.IsConstant()
5011 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
5012 offset + sizeof(int32_t))
5013 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
5014 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
5015 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005016 break;
5017 }
5018
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005019 case Primitive::kPrimVoid:
5020 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07005021 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005022 }
5023}
5024
5025void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005026 LocationSummary* locations =
5027 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005028 locations->SetInAt(0, Location::RequiresRegister());
5029 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005030}
5031
5032void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
5033 LocationSummary* locations = instruction->GetLocations();
5034 uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
Roland Levillain271ab9c2014-11-27 15:23:57 +00005035 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5036 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
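  // The length is a 32-bit field in the array header; this load also serves as
  // the implicit null check for the array reference.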
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005037 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005038 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005039}
5040
5041void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00005042 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
5043 ? LocationSummary::kCallOnSlowPath
5044 : LocationSummary::kNoCall;
5045 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005046 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04005047 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005048 if (instruction->HasUses()) {
5049 locations->SetOut(Location::SameAsFirstInput());
5050 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005051}
5052
5053void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5054 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005055 Location index_loc = locations->InAt(0);
5056 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07005057 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005058 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005059
Mark Mendell99dbd682015-04-22 16:18:52 -04005060 if (length_loc.IsConstant()) {
5061 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5062 if (index_loc.IsConstant()) {
5063      // BCE will remove the bounds check if we are guaranteed to pass.
5064 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5065 if (index < 0 || index >= length) {
5066 codegen_->AddSlowPath(slow_path);
5067 __ jmp(slow_path->GetEntryLabel());
5068 } else {
5069 // Some optimization after BCE may have generated this, and we should not
5070        // Some optimization after BCE may have generated this code; no bounds
5071        // check is needed here because the index is known to be in range.
5072 return;
5073 }
5074
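    // Note: the unsigned conditions used below (kAboveEqual / kBelowEqual) also
    // catch negative indices, which appear as large unsigned values.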
5075    // We have to reverse the jump condition because the constant length must be the second operand of the comparison.
5076 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5077 __ cmpl(index_reg, Immediate(length));
5078 codegen_->AddSlowPath(slow_path);
5079 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005080 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04005081 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5082 if (index_loc.IsConstant()) {
5083 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5084 __ cmpl(length, Immediate(value));
5085 } else {
5086 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5087 }
5088 codegen_->AddSlowPath(slow_path);
5089 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005090 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005091}
5092
5093void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5094 CpuRegister card,
5095 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005096 CpuRegister value,
5097 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005098 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005099 if (value_can_be_null) {
5100 __ testl(value, value);
5101 __ j(kEqual, &is_null);
5102 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005103 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5104 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005105 __ movq(temp, object);
5106 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
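  // Write the card: `card` holds the biased card table base, which is arranged
  // so that its least significant byte equals the dirty card value, hence it is
  // used both as the table base and as the value to store.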
Roland Levillain4d027112015-07-01 15:41:14 +01005107 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005108 if (value_can_be_null) {
5109 __ Bind(&is_null);
5110 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005111}
5112
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005113void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005114 LOG(FATAL) << "Unimplemented";
5115}
5116
5117void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005118 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5119}
5120
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005121void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5122 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5123}
5124
5125void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005126 HBasicBlock* block = instruction->GetBlock();
5127 if (block->GetLoopInformation() != nullptr) {
5128 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5129 // The back edge will generate the suspend check.
5130 return;
5131 }
5132 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5133 // The goto will generate the suspend check.
5134 return;
5135 }
5136 GenerateSuspendCheck(instruction, nullptr);
5137}
5138
5139void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5140 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005141 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005142 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5143 if (slow_path == nullptr) {
5144 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5145 instruction->SetSlowPath(slow_path);
5146 codegen_->AddSlowPath(slow_path);
5147 if (successor != nullptr) {
5148 DCHECK(successor->IsLoopHeader());
5149 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5150 }
5151 } else {
5152 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5153 }
5154
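  // Test the thread flags, reached through the GS-based Thread pointer: any
  // pending suspend or checkpoint request makes them non-zero and diverts
  // execution to the slow path.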
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005155 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5156 /* no_rip */ true),
5157 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005158 if (successor == nullptr) {
5159 __ j(kNotEqual, slow_path->GetEntryLabel());
5160 __ Bind(slow_path->GetReturnLabel());
5161 } else {
5162 __ j(kEqual, codegen_->GetLabelOf(successor));
5163 __ jmp(slow_path->GetEntryLabel());
5164 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005165}
5166
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005167X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5168 return codegen_->GetAssembler();
5169}
5170
5171void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005172 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005173 Location source = move->GetSource();
5174 Location destination = move->GetDestination();
5175
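  // Dispatch on the source/destination kinds (GPR, XMM, 32/64-bit stack slot,
  // constant). TMP serves as scratch for memory-to-memory moves and constants
  // are materialized directly.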
5176 if (source.IsRegister()) {
5177 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005178 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005179 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005180 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005181 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005182 } else {
5183 DCHECK(destination.IsDoubleStackSlot());
5184 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005185 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005186 }
5187 } else if (source.IsStackSlot()) {
5188 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005189 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005190 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005191 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005192 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005193 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005194 } else {
5195 DCHECK(destination.IsStackSlot());
5196 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5197 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5198 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005199 } else if (source.IsDoubleStackSlot()) {
5200 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005201 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005202 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005203 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005204 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5205 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005206 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005207 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005208 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5209 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5210 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005211 } else if (source.IsConstant()) {
5212 HConstant* constant = source.GetConstant();
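    // Materialize the constant directly: zero via xorl, other 32-bit values as
    // immediates, and 64-bit or floating-point values through the bit-pattern
    // loading and stack-store helpers below.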
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005213 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5214 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005215 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005216 if (value == 0) {
5217 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5218 } else {
5219 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5220 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005221 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005222 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005223 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005224 }
5225 } else if (constant->IsLongConstant()) {
5226 int64_t value = constant->AsLongConstant()->GetValue();
5227 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005228 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005229 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005230 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005231 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005232 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005233 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005234 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005235 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005236 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005237 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005238 } else {
5239 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005240 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005241 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5242 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005243 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005244 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005245 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005246 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005247 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005248 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005249 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005250 } else {
5251 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005252 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005253 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005254 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005255 } else if (source.IsFpuRegister()) {
5256 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005257 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005258 } else if (destination.IsStackSlot()) {
5259 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005260 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005261 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005262 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005263 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005264 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005265 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005266 }
5267}
5268
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005269void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005270 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005271 __ movl(Address(CpuRegister(RSP), mem), reg);
5272 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005273}
5274
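// A memory-to-memory exchange needs a scratch register besides TMP; the
// ScratchRegisterScope may spill one, in which case the stack offsets below are
// adjusted by one word.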
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005275void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005276 ScratchRegisterScope ensure_scratch(
5277 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5278
5279 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5280 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5281 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5282 Address(CpuRegister(RSP), mem2 + stack_offset));
5283 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5284 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5285 CpuRegister(ensure_scratch.GetRegister()));
5286}
5287
Mark Mendell8a1c7282015-06-29 15:41:28 -04005288void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5289 __ movq(CpuRegister(TMP), reg1);
5290 __ movq(reg1, reg2);
5291 __ movq(reg2, CpuRegister(TMP));
5292}
5293
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005294void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5295 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5296 __ movq(Address(CpuRegister(RSP), mem), reg);
5297 __ movq(reg, CpuRegister(TMP));
5298}
5299
5300void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5301 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005302 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005303
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005304 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5305 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5306 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5307 Address(CpuRegister(RSP), mem2 + stack_offset));
5308 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5309 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5310 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005311}
5312
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005313void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5314 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5315 __ movss(Address(CpuRegister(RSP), mem), reg);
5316 __ movd(reg, CpuRegister(TMP));
5317}
5318
5319void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5320 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5321 __ movsd(Address(CpuRegister(RSP), mem), reg);
5322 __ movd(reg, CpuRegister(TMP));
5323}
5324
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005325void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005326 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005327 Location source = move->GetSource();
5328 Location destination = move->GetDestination();
5329
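  // Swaps are built from the Exchange helpers above; any source/destination
  // combination not handled here (e.g. involving constants) is unexpected and
  // aborts.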
5330 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005331 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005332 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005333 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005334 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005335 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005336 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005337 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5338 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005339 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005340 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005341 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005342 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5343 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005344 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005345 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5346 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5347 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005348 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005349 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005350 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005351 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005352 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005353 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005354 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005355 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005356 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005357 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005358 }
5359}
5360
5361
5362void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5363 __ pushq(CpuRegister(reg));
5364}
5365
5366
5367void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5368 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005369}
5370
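// Compare the class status against kStatusInitialized and branch to the slow
// path if the class is not yet initialized; initialized classes fall through.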
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}

void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  InvokeRuntimeCallingConvention calling_convention;
  CodeGenerator::CreateLoadClassLocationSummary(
      cls,
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
      Location::RegisterLocation(RAX),
      /* code_generator_supports_read_barrier */ true);
}

void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
                            cls,
                            cls->GetDexPc(),
                            nullptr);
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();

  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
    GenerateGcRootFieldLoad(
        cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
  } else {
    // /* GcRoot<mirror::Class>[] */ out =
    //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
    __ movq(out, Address(current_method,
                         ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
    // /* GcRoot<mirror::Class> */ out = out[type_index]
    GenerateGcRootFieldLoad(
        cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));

    if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
      DCHECK(cls->CanCallRuntime());
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
          cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
      codegen_->AddSlowPath(slow_path);
      if (!cls->IsInDexCache()) {
        __ testl(out, out);
        __ j(kEqual, slow_path->GetEntryLabel());
      }
      if (cls->MustGenerateClinitCheck()) {
        GenerateClassInitializationCheck(slow_path, out);
      } else {
        __ Bind(slow_path->GetExitLabel());
      }
    }
  }
}

void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}

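// Decide which HLoadString load kind this code generator can actually emit.
// With read barriers enabled, the boot image kinds fall back to
// kDexCacheViaMethod until read barrier support is implemented for them.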
HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  if (kEmitCompilerReadBarrier) {
    switch (desired_string_load_kind) {
      case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      case HLoadString::LoadKind::kBootImageAddress:
        // TODO: Implement for read barrier.
        return HLoadString::LoadKind::kDexCacheViaMethod;
      default:
        break;
    }
  }
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      // We prefer the always-available RIP-relative address for the x86-64 boot image.
      return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kDexCacheAddress:
      DCHECK(Runtime::Current()->UseJit());
      break;
    case HLoadString::LoadKind::kDexCachePcRelative:
      DCHECK(!Runtime::Current()->UseJit());
      break;
    case HLoadString::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_string_load_kind;
}

void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(!kEmitCompilerReadBarrier);
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordStringPatch(load);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      DCHECK(!kEmitCompilerReadBarrier);
      DCHECK_NE(load->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kDexCacheAddress: {
      DCHECK_NE(load->GetAddress(), 0u);
      if (IsUint<32>(load->GetAddress())) {
        Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
        GenerateGcRootFieldLoad(load, out_loc, address);
      } else {
        // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
        __ movq(out, Immediate(load->GetAddress()));
        GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
      }
      break;
    }
    case HLoadString::LoadKind::kDexCachePcRelative: {
      uint32_t offset = load->GetDexCacheElementOffset();
      Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
      break;
    }
    case HLoadString::LoadKind::kDexCacheViaMethod: {
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();

      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(
          load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
      // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
      __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
      // /* GcRoot<mirror::String> */ out = out[string_index]
      GenerateGcRootFieldLoad(
          load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
      UNREACHABLE();
  }

  if (!load->IsInDexCache()) {
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
    codegen_->AddSlowPath(slow_path);
    __ testl(out, out);
    __ j(kEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetExitLabel());
  }
}

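// The pending exception lives in thread-local storage; on x86-64 it is
// reached through the GS segment register.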
static Address GetExceptionTlsAddress() {
  return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
                           /* no_rip */ true);
}

void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}

void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}

void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

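// With read barriers, some type check kinds need an extra temporary register:
// always with Baker's fast path, and otherwise for the kinds that walk the
// super class or component type chains.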
static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
  return kEmitCompilerReadBarrier &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck);
}

void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  // When read barriers are enabled, we need a temporary register for
  // some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(0) :
      Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  // /* HeapReference<Class> */ out = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require us to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005816 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005817
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005818 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005819 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005820 __ xorl(out, out);
5821 }
5822
5823 if (done.IsLinked()) {
5824 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005825 }
5826
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005827 if (slow_path != nullptr) {
5828 __ Bind(slow_path->GetExitLabel());
5829 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005830}
5831
void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall;  // In fact, call on a fatal (non-returning) slow path.
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
  locations->AddTemp(Location::RequiresRegister());
  // When read barriers are enabled, we need an additional temporary
  // register for some cases.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
  Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  bool is_type_check_slow_path_fatal =
      (type_check_kind == TypeCheckKind::kExactCheck ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
      !instruction->CanThrowIntoCatchBlock();
  SlowPathCode* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      NearLabel done;
      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }

      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      __ Bind(&done);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      NearLabel done;
      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }

      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, compare_classes;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // to the `compare_classes` label to compare it with the checked
      // class.
      __ testl(temp, temp);
      __ j(kNotEqual, &compare_classes);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ jmp(type_check_slow_path->GetEntryLabel());

      __ Bind(&compare_classes);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ Bind(&done);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      NearLabel done;
      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }

      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

      // Walk over the class hierarchy to find a match.
      NearLabel loop;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ testl(temp, temp);
      __ j(kNotEqual, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ jmp(type_check_slow_path->GetEntryLabel());
      __ Bind(&done);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // We cannot use a NearLabel here, as its range might be too
      // short in some cases when read barriers are enabled. This has
      // been observed for instance when the code emitted for this
      // case uses high x86-64 registers (R8-R15).
      Label done;
      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }

      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

      // Do an exact check.
      NearLabel check_non_primitive_component_type;
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ testl(temp, temp);
      __ j(kNotEqual, &check_non_primitive_component_type);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ jmp(type_check_slow_path->GetEntryLabel());

      __ Bind(&check_non_primitive_component_type);
      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kEqual, &done);
      // Same comment as above regarding `temp` and the slow path.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
      __ jmp(type_check_slow_path->GetEntryLabel());
      __ Bind(&done);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      NearLabel done;
      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }

      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(
          instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);

      // We always go into the type check slow path for the unresolved
      // and interface check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require us to
      // assign fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      __ jmp(type_check_slow_path->GetEntryLabel());
      __ Bind(&done);
      break;
  }

  __ Bind(type_check_slow_path->GetExitLabel());
}

void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
                                                 : QUICK_ENTRY_POINT(pUnlockObject),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
}

void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }

void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
         || instruction->GetResultType() == Primitive::kPrimLong);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  locations->SetOut(Location::SameAsFirstInput());
}

void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}

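// Emit and/or/xor for int and long values; the result goes into the first
// input's register. For long operations, a constant second operand is encoded
// as an immediate only when it fits in 32 bits; otherwise it is read from an
// in-memory 64-bit literal, since these instructions take no 64-bit immediate.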
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}

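// Load a heap reference located at `out + offset` into `out`, emitting a read
// barrier if needed: Baker's fast path when enabled, a slow-path read barrier
// otherwise (which needs the original value of `out` saved in `maybe_temp`),
// or a plain load followed by reference unpoisoning.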
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                      Location out,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                       Location out,
                                                                       Location obj,
                                                                       uint32_t offset,
                                                                       Location maybe_temp) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}

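// Load a GC root into `root` from the given address. When `fixup_label` is
// non-null it is bound right after the instruction whose displacement needs
// patching (the PC-relative dex cache array load).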
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             const Address& address,
                                                             Label* fixup_label) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = *address;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path used to mark the GC root `root`.
      SlowPathCode* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
      codegen_->AddSlowPath(slow_path);

      __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
                                      /* no_rip */ true),
                    Immediate(0));
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                CpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Address src(obj, offset);
  GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
}

6370void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6371 Location ref,
6372 CpuRegister obj,
6373 uint32_t data_offset,
6374 Location index,
6375 Location temp,
6376 bool needs_null_check) {
6377 DCHECK(kEmitCompilerReadBarrier);
6378 DCHECK(kUseBakerReadBarrier);
6379
6380 // /* HeapReference<Object> */ ref =
6381 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6382 Address src = index.IsConstant() ?
6383 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6384 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6385 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6386}
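
// A worked example of the element address formed above (numbers are
// illustrative; data_offset depends on the array type). With data_offset = 12
// and a constant index of 3, TIMES_4 denotes the scale's log2 (i.e. 2), so:
//
//   src = Address(obj, (3 << 2) + 12)  =  obj + 24
//
// With the index in a register, the same address is produced by the scaled
// addressing mode: obj + index * 4 + data_offset.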
6387
6388void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6389 Location ref,
6390 CpuRegister obj,
6391 const Address& src,
6392 Location temp,
6393 bool needs_null_check) {
6394 DCHECK(kEmitCompilerReadBarrier);
6395 DCHECK(kUseBakerReadBarrier);
6396
6397 // In slow path based read barriers, the read barrier call is
6398 // inserted after the original load. However, in fast path based
6399 // Baker's read barriers, we need to perform the load of
6400 // mirror::Object::monitor_ *before* the original reference load.
6401 // This load-load ordering is required by the read barrier.
6402 // The fast path/slow path (for Baker's algorithm) should look like:
6403 //
6404  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
6405 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6406 // HeapReference<Object> ref = *src; // Original reference load.
6407 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6408 // if (is_gray) {
6409 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6410 // }
6411 //
6412 // Note: the original implementation in ReadBarrier::Barrier is
6413 // slightly more complex as:
6414 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006415  //   the high bits of rb_state, which are expected to be all zeroes
6416 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6417 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006418 // - it performs additional checks that we do not do here for
6419 // performance reasons.
6420
6421 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6422 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6423 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6424
6425 // /* int32_t */ monitor = obj->monitor_
6426 __ movl(temp_reg, Address(obj, monitor_offset));
6427 if (needs_null_check) {
6428 MaybeRecordImplicitNullCheck(instruction);
6429 }
6430 // /* LockWord */ lock_word = LockWord(monitor)
6431 static_assert(sizeof(LockWord) == sizeof(int32_t),
6432 "art::LockWord and int32_t have different sizes.");
6433 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6434 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6435 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6436 static_assert(
6437 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6438 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6439
6440 // Load fence to prevent load-load reordering.
6441 // Note that this is a no-op, thanks to the x86-64 memory model.
6442 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6443
6444 // The actual reference load.
6445 // /* HeapReference<Object> */ ref = *src
6446 __ movl(ref_reg, src);
6447
6448 // Object* ref = ref_addr->AsMirrorPtr()
6449 __ MaybeUnpoisonHeapReference(ref_reg);
6450
6451 // Slow path used to mark the object `ref` when it is gray.
6452 SlowPathCode* slow_path =
6453 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6454 AddSlowPath(slow_path);
6455
6456 // if (rb_state == ReadBarrier::gray_ptr_)
6457 // ref = ReadBarrier::Mark(ref);
6458 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6459 __ j(kEqual, slow_path->GetEntryLabel());
6460 __ Bind(slow_path->GetExitLabel());
6461}
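
// What the monitor-word manipulation above computes, as a hedged C++ sketch
// (constant values are deliberately not restated here):
//
//   uint32_t monitor = obj->monitor_;                                   // movl temp, [obj + monitor_offset]
//   uint32_t rb_state = (monitor >> LockWord::kReadBarrierStateShift)   // shrl temp, shift
//                       & LockWord::kReadBarrierStateMask;              // andl temp, mask
//   // load fence (no-op on x86-64)
//   HeapReference<Object> ref = *src;                                   // movl ref_reg, src
//   if (rb_state == ReadBarrier::gray_ptr_) {                           // cmpl + je slow path
//     ref = ReadBarrier::Mark(ref);                                     // runtime entrypoint
//   }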
6462
6463void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6464 Location out,
6465 Location ref,
6466 Location obj,
6467 uint32_t offset,
6468 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006469 DCHECK(kEmitCompilerReadBarrier);
6470
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006471 // Insert a slow path based read barrier *after* the reference load.
6472 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006473 // If heap poisoning is enabled, the unpoisoning of the loaded
6474 // reference will be carried out by the runtime within the slow
6475 // path.
6476 //
6477 // Note that `ref` currently does not get unpoisoned (when heap
6478 // poisoning is enabled), which is alright as the `ref` argument is
6479 // not used by the artReadBarrierSlow entry point.
6480 //
6481 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6482 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6483 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6484 AddSlowPath(slow_path);
6485
Roland Levillain0d5a2812015-11-13 10:07:31 +00006486 __ jmp(slow_path->GetEntryLabel());
6487 __ Bind(slow_path->GetExitLabel());
6488}
6489
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006490void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6491 Location out,
6492 Location ref,
6493 Location obj,
6494 uint32_t offset,
6495 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006496 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006497 // Baker's read barriers shall be handled by the fast path
6498 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6499 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006500 // If heap poisoning is enabled, unpoisoning will be taken care of
6501 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006502 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006503 } else if (kPoisonHeapReferences) {
6504 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6505 }
6506}
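
// Summary of the cases handled by MaybeGenerateReadBarrierSlow above (a
// restatement of the branches, not additional behavior):
//
//   kEmitCompilerReadBarrier && !kUseBakerReadBarrier   -> slow path read barrier
//   kEmitCompilerReadBarrier && kUseBakerReadBarrier    -> unreachable here (DCHECK); the
//                                                          Baker fast path handles it
//   !kEmitCompilerReadBarrier && kPoisonHeapReferences  -> unpoison `out` in place
//   otherwise                                           -> nothing to do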
6507
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006508void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6509 Location out,
6510 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006511 DCHECK(kEmitCompilerReadBarrier);
6512
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006513 // Insert a slow path based read barrier *after* the GC root load.
6514 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006515 // Note that GC roots are not affected by heap poisoning, so we do
6516 // not need to do anything special for this here.
6517 SlowPathCode* slow_path =
6518 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6519 AddSlowPath(slow_path);
6520
Roland Levillain0d5a2812015-11-13 10:07:31 +00006521 __ jmp(slow_path->GetEntryLabel());
6522 __ Bind(slow_path->GetExitLabel());
6523}
6524
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006525void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006526 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006527 LOG(FATAL) << "Unreachable";
6528}
6529
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006530void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006531 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006532 LOG(FATAL) << "Unreachable";
6533}
6534
Mark Mendellfe57faa2015-09-18 09:26:15 -04006535// Simple implementation of packed switch - generate cascaded compare/jumps.
6536void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6537 LocationSummary* locations =
6538 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6539 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006540 locations->AddTemp(Location::RequiresRegister());
6541 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006542}
6543
6544void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6545 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006546 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006547 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006548 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6549 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6550 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006551 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6552
6553 // Should we generate smaller inline compare/jumps?
6554 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6555 // Figure out the correct compare values and jump conditions.
6556 // Handle the first compare/branch as a special case because it might
6557 // jump to the default case.
6558 DCHECK_GT(num_entries, 2u);
6559 Condition first_condition;
6560 uint32_t index;
6561 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6562 if (lower_bound != 0) {
6563 first_condition = kLess;
6564 __ cmpl(value_reg_in, Immediate(lower_bound));
6565 __ j(first_condition, codegen_->GetLabelOf(default_block));
6566 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6567
6568 index = 1;
6569 } else {
6570 // Handle all the compare/jumps below.
6571 first_condition = kBelow;
6572 index = 0;
6573 }
6574
6575 // Handle the rest of the compare/jumps.
6576 for (; index + 1 < num_entries; index += 2) {
6577 int32_t compare_to_value = lower_bound + index + 1;
6578 __ cmpl(value_reg_in, Immediate(compare_to_value));
6579 // Jump to successors[index] if value < case_value[index].
6580 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6581 // Jump to successors[index + 1] if value == case_value[index + 1].
6582 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6583 }
6584
6585 if (index != num_entries) {
6586 // There are an odd number of entries. Handle the last one.
6587 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00006588 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006589 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6590 }
6591
6592 // And the default for any other value.
6593 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6594 __ jmp(codegen_->GetLabelOf(default_block));
6595 }
6596 return;
6597 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006598
6599 // Remove the bias, if needed.
6600 Register value_reg_out = value_reg_in.AsRegister();
6601 if (lower_bound != 0) {
6602 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6603 value_reg_out = temp_reg.AsRegister();
6604 }
6605 CpuRegister value_reg(value_reg_out);
6606
6607 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006608 __ cmpl(value_reg, Immediate(num_entries - 1));
6609 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006610
Mark Mendell9c86b482015-09-18 13:36:07 -04006611 // We are in the range of the table.
6612 // Load the address of the jump table in the constant area.
6613 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006614
Mark Mendell9c86b482015-09-18 13:36:07 -04006615 // Load the (signed) offset from the jump table.
6616 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6617
6618 // Add the offset to the address of the table base.
6619 __ addq(temp_reg, base_reg);
6620
6621 // And jump.
6622 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006623}
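
// At the source level, the two lowerings above correspond roughly to the
// following sketch (not generated code; shown for the lower_bound != 0 case):
//
//   // Small switch: cascaded compares, two cases per cmpl.
//   if (value < lower_bound) goto default;
//   if (value == lower_bound) goto case_0;
//   if (value < lower_bound + 2) goto case_1;
//   if (value == lower_bound + 2) goto case_2;
//   ...
//
//   // Large switch: bias, bounds check, indirect jump through a table of
//   // 32-bit offsets held in the RIP-addressed constant area.
//   uint32_t biased = value - lower_bound;
//   if (biased > num_entries - 1) goto default;    // unsigned compare (kAbove)
//   goto *(table_base + table[biased]);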
6624
Aart Bikc5d47542016-01-27 17:00:35 -08006625void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6626 if (value == 0) {
6627 __ xorl(dest, dest);
6628 } else {
6629 __ movl(dest, Immediate(value));
6630 }
6631}
6632
Mark Mendell92e83bf2015-05-07 11:25:03 -04006633void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6634 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006635 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006636 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006637 } else if (IsUint<32>(value)) {
6638 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006639 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6640 } else {
6641 __ movq(dest, Immediate(value));
6642 }
6643}
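
// Illustrative instruction selection for Load64BitValue (example values only):
//
//   Load64BitValue(rax, 0)                   -> xorl eax, eax          // also clears the upper 32 bits
//   Load64BitValue(rax, 0x00000000FFFFFFFF)  -> movl eax, 0xFFFFFFFF   // zero-extends, shorter encoding
//   Load64BitValue(rax, 0x0000000123456789)  -> movq rax, 0x123456789  // needs the 64-bit form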
6644
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006645void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6646 if (value == 0) {
6647 __ xorps(dest, dest);
6648 } else {
6649 __ movss(dest, LiteralInt32Address(value));
6650 }
6651}
6652
6653void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6654 if (value == 0) {
6655 __ xorpd(dest, dest);
6656 } else {
6657 __ movsd(dest, LiteralInt64Address(value));
6658 }
6659}
6660
6661void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6662 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6663}
6664
6665void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6666 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6667}
6668
Aart Bika19616e2016-02-01 18:57:58 -08006669void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6670 if (value == 0) {
6671 __ testl(dest, dest);
6672 } else {
6673 __ cmpl(dest, Immediate(value));
6674 }
6675}
6676
6677void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6678 if (IsInt<32>(value)) {
6679 if (value == 0) {
6680 __ testq(dest, dest);
6681 } else {
6682 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6683 }
6684 } else {
6685 // Value won't fit in an int.
6686 __ cmpq(dest, LiteralInt64Address(value));
6687 }
6688}
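
// Illustrative comparison shapes chosen by Compare64BitValue (example values only):
//
//   Compare64BitValue(rax, 0)                 -> testq rax, rax             // no immediate needed
//   Compare64BitValue(rax, 42)                -> cmpq rax, 42               // sign-extended imm32
//   Compare64BitValue(rax, int64_t{1} << 40)  -> cmpq rax, [rip + literal]  // via the constant area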
6689
Mark Mendellcfa410b2015-05-25 16:02:44 -04006690void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6691 DCHECK(dest.IsDoubleStackSlot());
6692 if (IsInt<32>(value)) {
6693 // Can move directly as an int32 constant.
6694 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6695 Immediate(static_cast<int32_t>(value)));
6696 } else {
6697 Load64BitValue(CpuRegister(TMP), value);
6698 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6699 }
6700}
6701
Mark Mendell9c86b482015-09-18 13:36:07 -04006702/**
6703 * Class to handle late fixup of offsets into constant area.
6704 */
6705class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6706 public:
6707 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6708 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6709
6710 protected:
6711 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6712
6713 CodeGeneratorX86_64* codegen_;
6714
6715 private:
6716 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6717 // Patch the correct offset for the instruction. We use the address of the
6718 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6719 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6720 int32_t relative_position = constant_offset - pos;
6721
6722 // Patch in the right value.
6723 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6724 }
6725
6726 // Location in constant area that the fixup refers to.
6727 size_t offset_into_constant_area_;
6728};
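
// A worked example of the fixup arithmetic in Process() (addresses are made
// up): if the constant area starts at code offset 0x400, the referenced value
// lives 0x10 bytes into it, and the instruction's 4-byte displacement ends at
// pos = 0x120, then
//
//   constant_offset   = 0x400 + 0x10  = 0x410
//   relative_position = 0x410 - 0x120 = 0x2F0
//
// and 0x2F0 is stored at pos - 4, i.e. into the displacement field of the
// RIP-relative instruction, since RIP already points at the next instruction
// (pos) when the displacement is applied.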
6729
6730/**
6731 * Class to handle late fixup of offsets to a jump table that will be created in the
6732 * constant area.
6733 */
6734class JumpTableRIPFixup : public RIPFixup {
6735 public:
6736 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6737 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6738
6739 void CreateJumpTable() {
6740 X86_64Assembler* assembler = codegen_->GetAssembler();
6741
6742 // Ensure that the reference to the jump table has the correct offset.
6743 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6744 SetOffset(offset_in_constant_table);
6745
6746 // Compute the offset from the start of the function to this jump table.
6747 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6748
6749    // Populate the jump table with the offsets to each case's target block.
6750 int32_t num_entries = switch_instr_->GetNumEntries();
6751 HBasicBlock* block = switch_instr_->GetBlock();
6752 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6753 // The value that we want is the target offset - the position of the table.
6754 for (int32_t i = 0; i < num_entries; i++) {
6755 HBasicBlock* b = successors[i];
6756 Label* l = codegen_->GetLabelOf(b);
6757 DCHECK(l->IsBound());
6758 int32_t offset_to_block = l->Position() - current_table_offset;
6759 assembler->AppendInt32(offset_to_block);
6760 }
6761 }
6762
6763 private:
6764 const HPackedSwitch* switch_instr_;
6765};
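
// Example of the entries written by CreateJumpTable() (offsets are
// illustrative): with the table located at current_table_offset = 0x800 and
// case targets bound at code offsets 0x100, 0x140 and 0x190, the emitted
// 32-bit entries are
//
//   0x100 - 0x800 = -0x700
//   0x140 - 0x800 = -0x6C0
//   0x190 - 0x800 = -0x670
//
// VisitPackedSwitch() adds the loaded entry back to base_reg (the table
// address) to recover the absolute target before the indirect jmp.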
6766
Mark Mendellf55c3e02015-03-26 21:07:46 -04006767void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6768 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006769 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006770 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6771 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006772 assembler->Align(4, 0);
6773 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006774
6775 // Populate any jump tables.
6776 for (auto jump_table : fixups_to_jump_tables_) {
6777 jump_table->CreateJumpTable();
6778 }
6779
6780 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006781 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006782 }
6783
6784 // And finish up.
6785 CodeGenerator::Finalize(allocator);
6786}
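
// Resulting code layout when a constant area is present (a sketch of what
// Finalize() above produces):
//
//   [ generated method code ]
//   [ padding to a 4-byte boundary ]
//   constant_area_start_:
//   [ literals added during code generation (4- and 8-byte values) ]
//   [ jump tables appended by CreateJumpTable() ]
//
// Every RIPFixup recorded while emitting code is then resolved relative to
// constant_area_start_.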
6787
Mark Mendellf55c3e02015-03-26 21:07:46 -04006788Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6789 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6790 return Address::RIP(fixup);
6791}
6792
6793Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6794 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6795 return Address::RIP(fixup);
6796}
6797
6798Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6799 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6800 return Address::RIP(fixup);
6801}
6802
6803Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6804 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6805 return Address::RIP(fixup);
6806}
6807
Andreas Gampe85b62f22015-09-09 13:15:38 -07006808// TODO: trg as memory.
6809void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6810 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006811 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006812 return;
6813 }
6814
6815 DCHECK_NE(type, Primitive::kPrimVoid);
6816
6817 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6818 if (trg.Equals(return_loc)) {
6819 return;
6820 }
6821
6822 // Let the parallel move resolver take care of all of this.
6823 HParallelMove parallel_move(GetGraph()->GetArena());
6824 parallel_move.AddMove(return_loc, trg, type, nullptr);
6825 GetMoveResolver()->EmitNativeCode(&parallel_move);
6826}
6827
Mark Mendell9c86b482015-09-18 13:36:07 -04006828Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6829 // Create a fixup to be used to create and address the jump table.
6830 JumpTableRIPFixup* table_fixup =
6831 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6832
6833 // We have to populate the jump tables.
6834 fixups_to_jump_tables_.push_back(table_fixup);
6835 return Address::RIP(table_fixup);
6836}
6837
Mark Mendellea5af682015-10-22 17:35:49 -04006838void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6839 const Address& addr_high,
6840 int64_t v,
6841 HInstruction* instruction) {
6842 if (IsInt<32>(v)) {
6843 int32_t v_32 = v;
6844 __ movq(addr_low, Immediate(v_32));
6845 MaybeRecordImplicitNullCheck(instruction);
6846 } else {
6847    // Didn't fit in a 32-bit signed immediate. Do it in two 32-bit pieces.
6848 int32_t low_v = Low32Bits(v);
6849 int32_t high_v = High32Bits(v);
6850 __ movl(addr_low, Immediate(low_v));
6851 MaybeRecordImplicitNullCheck(instruction);
6852 __ movl(addr_high, Immediate(high_v));
6853 }
6854}
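
// Examples of the two store shapes above (values are illustrative only):
//
//   v = 0x12345678 (IsInt<32>):  movq addr_low, 0x12345678     // one 64-bit store, sign-extended imm32
//   v = 0x1122334455667788:      movl addr_low,  0x55667788    // Low32Bits(v)
//                                movl addr_high, 0x11223344    // High32Bits(v)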
6855
Roland Levillain4d027112015-07-01 15:41:14 +01006856#undef __
6857
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006858} // namespace x86_64
6859} // namespace art