/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump sequence
// therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
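// (Derived from the estimate above rather than re-measured: at the threshold of 5 entries the
// compare/jump form costs roughly 1.5 * 5 ~= 8 instructions and no table data, while the jump
// table form costs 7 instructions plus 5 table entries, so smaller switches stay with
// compare/jump.)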

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

static constexpr int kC2ConditionMask = 0x400;
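// (0x400 is bit 10 of the x87 FPU status word, i.e. the C2 condition flag; it is presumably
// tested when looping on x87 partial-remainder style instructions until the reduction is
// complete.)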

#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};
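// The slow path classes below follow the same general shape as NullCheckSlowPathX86_64 above:
// bind the entry label targeted by the fast path, save live registers when the runtime call can
// return (or may throw into a catch block), marshal arguments, call the quick entrypoint and
// check its signature with CheckEntrypointTypes, then either fall through (fatal paths) or
// restore registers and jump back to the exit label.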

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
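// Note on the remainder ("else") case above: MIN_VALUE % -1 is 0 for both int and long, and
// because writing a 32-bit register on x86-64 zero-extends into the upper 32 bits, a single
// xorl is enough (and shorter to encode than xorq) even for the long remainder.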

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowArrayBounds),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCode(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
#define __ down_cast<X86_64Assembler*>(GetAssembler())->

inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default:      break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}
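// The FP mapping above uses the unsigned-style conditions (below/above) because SSE
// comparisons (ucomiss/ucomisd) set ZF/CF the way an unsigned integer compare would;
// unordered (NaN) operands set ZF, PF and CF and are expected to be handled separately
// by the code that emits the comparison.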

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
  RecordPcInfo(instruction, dex_pc, slow_path);
}
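// The gs()-prefixed absolute call above goes through the current Thread: on x86-64 ART makes
// Thread::Current() reachable via the GS segment base, so `entry_point_offset` (a Thread
// offset) selects the quick entrypoint of the calling thread.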
987
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000988static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +0000989// Use a fake return address register to mimic Quick.
990static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
Mark Mendellfb8d2792015-03-31 22:16:59 -0400991CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000992 const X86_64InstructionSetFeatures& isa_features,
993 const CompilerOptions& compiler_options,
994 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +0000995 : CodeGenerator(graph,
996 kNumberOfCpuRegisters,
997 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000998 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +0000999 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1000 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001001 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001002 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1003 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001004 compiler_options,
1005 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001006 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001007 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001008 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -04001009 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001010 assembler_(graph->GetArena()),
Mark Mendellf55c3e02015-03-26 21:07:46 -04001011 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +00001012 constant_area_start_(0),
Vladimir Marko5233f932015-09-29 19:01:15 +01001013 method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1014 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0f7dca42015-11-02 14:36:43 +00001015 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001016 simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1017 string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Mark Mendell9c86b482015-09-18 13:36:07 -04001018 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001019 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
1020}
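// A note on kFakeReturnRegister above: or-ing it into the core callee-save mask and marking it
// as allocated makes the return address pushed by the caller's `call` count as one core spill
// slot. As far as I can tell this is pure bookkeeping so that frame size and CFI line up with
// the Quick frame layout; no real register is actually reserved.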
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001021
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001022InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1023 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001024 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001025 assembler_(codegen->GetAssembler()),
1026 codegen_(codegen) {}
1027
David Brazdil58282f42016-01-14 12:45:10 +00001028void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001029 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001030 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001031
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001032 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001033 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001034}
1035
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001036static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001037 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001038}
David Srbecky9d8606d2015-04-12 09:35:32 +01001039
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001040static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001041 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001042}
1043
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001044void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001045 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001046 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001047 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001048 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001049 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001050
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001051 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001052 __ testq(CpuRegister(RAX), Address(
1053 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001054 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001055 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001056
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001057 if (HasEmptyFrame()) {
1058 return;
1059 }
1060
Nicolas Geoffray98893962015-01-21 12:32:32 +00001061 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001062 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001063 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001064 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001065 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1066 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001067 }
1068 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001069
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001070 int adjust = GetFrameSize() - GetCoreSpillSize();
1071 __ subq(CpuRegister(RSP), Immediate(adjust));
1072 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001073 uint32_t xmm_spill_location = GetFpuSpillStart();
1074 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001075
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001076 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1077 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001078 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1079 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1080 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001081 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001082 }
1083
Mathieu Chartiere401d142015-04-22 13:56:20 -07001084 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001085 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001086}
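// For illustration only (the exact registers and sizes depend on the method being compiled),
// the prologue above for a non-leaf method that spills RBX and XMM12 comes out roughly as:
//
//   testq %rax, -<reserved>(%rsp)    // implicit stack-overflow probe; faults when out of stack
//   pushq %rbx                       // core callee-saves, highest index first
//   subq  $<frame - already pushed>, %rsp
//   movsd %xmm12, <offset>(%rsp)     // XMM callee-saves are stored below the pushes
//   movq  %rdi, (%rsp)               // ArtMethod* lands at the bottom of the frame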
1087
1088void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001089 __ cfi().RememberState();
1090 if (!HasEmptyFrame()) {
1091 uint32_t xmm_spill_location = GetFpuSpillStart();
1092 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1093 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1094 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1095 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1096 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1097 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1098 }
1099 }
1100
1101 int adjust = GetFrameSize() - GetCoreSpillSize();
1102 __ addq(CpuRegister(RSP), Immediate(adjust));
1103 __ cfi().AdjustCFAOffset(-adjust);
1104
1105 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1106 Register reg = kCoreCalleeSaves[i];
1107 if (allocated_registers_.ContainsCoreRegister(reg)) {
1108 __ popq(CpuRegister(reg));
1109 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1110 __ cfi().Restore(DWARFReg(reg));
1111 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001112 }
1113 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001114 __ ret();
1115 __ cfi().RestoreState();
1116 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001117}
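// The epilogue mirrors the prologue in reverse: reload the XMM callee-saves, release the
// frame, pop the core callee-saves, then `ret`. A sketch for the same hypothetical method:
//
//   movsd <offset>(%rsp), %xmm12
//   addq  $<frame - already pushed>, %rsp
//   popq  %rbx
//   ret
//
// RememberState()/RestoreState() bracket the sequence so the CFI stays valid for any code
// emitted after this (possibly non-final) return.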
1118
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001119void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1120 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001121}
1122
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001123void CodeGeneratorX86_64::Move(Location destination, Location source) {
1124 if (source.Equals(destination)) {
1125 return;
1126 }
1127 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001128 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001129 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001130 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001131 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001132 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001133 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001134 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1135 } else if (source.IsConstant()) {
1136 HConstant* constant = source.GetConstant();
1137 if (constant->IsLongConstant()) {
1138 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1139 } else {
1140 Load32BitValue(dest, GetInt32ValueOf(constant));
1141 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001142 } else {
1143 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001144 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001145 }
1146 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001147 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001148 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001149 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001150 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001151 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1152 } else if (source.IsConstant()) {
1153 HConstant* constant = source.GetConstant();
1154 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1155 if (constant->IsFloatConstant()) {
1156 Load32BitValue(dest, static_cast<int32_t>(value));
1157 } else {
1158 Load64BitValue(dest, value);
1159 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001160 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001161 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001162 } else {
1163 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001164 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001165 }
1166 } else if (destination.IsStackSlot()) {
1167 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001168 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001169 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001170 } else if (source.IsFpuRegister()) {
1171 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001172 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001173 } else if (source.IsConstant()) {
1174 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001175 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001176 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001177 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001178 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001179 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1180 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001181 }
1182 } else {
1183 DCHECK(destination.IsDoubleStackSlot());
1184 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001185 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001186 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001187 } else if (source.IsFpuRegister()) {
1188 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001189 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001190 } else if (source.IsConstant()) {
1191 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001192 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001193 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001194 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001195 } else {
1196 DCHECK(constant->IsLongConstant());
1197 value = constant->AsLongConstant()->GetValue();
1198 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001199 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001200 } else {
1201 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001202 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1203 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001204 }
1205 }
1206}
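// x86 has no memory-to-memory mov, so the stack-to-stack cases above bounce through TMP.
// Moving a 64-bit value between two stack slots, for example, emits approximately:
//
//   movq <src>(%rsp), TMP
//   movq TMP, <dst>(%rsp)
//
// TMP is blocked from register allocation in SetupBlockedRegisters(), so clobbering it here
// is always safe.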
1207
Calin Juravle175dc732015-08-25 15:42:32 +01001208void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1209 DCHECK(location.IsRegister());
1210 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1211}
1212
Calin Juravlee460d1d2015-09-29 04:52:17 +01001213void CodeGeneratorX86_64::MoveLocation(
1214 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1215 Move(dst, src);
1216}
1217
1218void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1219 if (location.IsRegister()) {
1220 locations->AddTemp(location);
1221 } else {
1222 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1223 }
1224}
1225
David Brazdilfc6a86a2015-06-26 10:33:45 +00001226void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001227 DCHECK(!successor->IsExitBlock());
1228
1229 HBasicBlock* block = got->GetBlock();
1230 HInstruction* previous = got->GetPrevious();
1231
1232 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001233 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001234 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1235 return;
1236 }
1237
1238 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1239 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1240 }
1241 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001242 __ jmp(codegen_->GetLabelOf(successor));
1243 }
1244}
1245
David Brazdilfc6a86a2015-06-26 10:33:45 +00001246void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1247 got->SetLocations(nullptr);
1248}
1249
1250void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1251 HandleGoto(got, got->GetSuccessor());
1252}
1253
1254void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1255 try_boundary->SetLocations(nullptr);
1256}
1257
1258void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1259 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1260 if (!successor->IsExitBlock()) {
1261 HandleGoto(try_boundary, successor);
1262 }
1263}
1264
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001265void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1266 exit->SetLocations(nullptr);
1267}
1268
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001269void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001270}
1271
Mark Mendell152408f2015-12-31 12:28:50 -05001272template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001273void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001274 LabelType* true_label,
1275 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001276 if (cond->IsFPConditionTrueIfNaN()) {
1277 __ j(kUnordered, true_label);
1278 } else if (cond->IsFPConditionFalseIfNaN()) {
1279 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001280 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001281 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001282}
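// Background for the jumps above (a reading of the code, not a spec): ucomiss/ucomisd set
// ZF/CF as an unsigned integer compare would and signal unordered (NaN) operands through PF.
// The helper therefore first routes the unordered case to whichever label the condition's
// NaN bias requires, then branches on the unsigned-style condition code from
// X86_64FPCondition(), e.g. kBelow for a less-than compare.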
1283
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001284void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001285 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001286
Mark Mendellc4701932015-04-10 13:18:51 -04001287 Location left = locations->InAt(0);
1288 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001289 Primitive::Type type = condition->InputAt(0)->GetType();
1290 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001291 case Primitive::kPrimBoolean:
1292 case Primitive::kPrimByte:
1293 case Primitive::kPrimChar:
1294 case Primitive::kPrimShort:
1295 case Primitive::kPrimInt:
1296 case Primitive::kPrimNot: {
1297 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1298 if (right.IsConstant()) {
1299 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1300 if (value == 0) {
1301 __ testl(left_reg, left_reg);
1302 } else {
1303 __ cmpl(left_reg, Immediate(value));
1304 }
1305 } else if (right.IsStackSlot()) {
1306 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1307 } else {
1308 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1309 }
1310 break;
1311 }
Mark Mendellc4701932015-04-10 13:18:51 -04001312 case Primitive::kPrimLong: {
1313 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1314 if (right.IsConstant()) {
1315 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001316 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001317 } else if (right.IsDoubleStackSlot()) {
1318 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1319 } else {
1320 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1321 }
Mark Mendellc4701932015-04-10 13:18:51 -04001322 break;
1323 }
1324 case Primitive::kPrimFloat: {
1325 if (right.IsFpuRegister()) {
1326 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1327 } else if (right.IsConstant()) {
1328 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1329 codegen_->LiteralFloatAddress(
1330 right.GetConstant()->AsFloatConstant()->GetValue()));
1331 } else {
1332 DCHECK(right.IsStackSlot());
1333 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1334 Address(CpuRegister(RSP), right.GetStackIndex()));
1335 }
Mark Mendellc4701932015-04-10 13:18:51 -04001336 break;
1337 }
1338 case Primitive::kPrimDouble: {
1339 if (right.IsFpuRegister()) {
1340 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1341 } else if (right.IsConstant()) {
1342 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1343 codegen_->LiteralDoubleAddress(
1344 right.GetConstant()->AsDoubleConstant()->GetValue()));
1345 } else {
1346 DCHECK(right.IsDoubleStackSlot());
1347 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1348 Address(CpuRegister(RSP), right.GetStackIndex()));
1349 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001350 break;
1351 }
1352 default:
1353 LOG(FATAL) << "Unexpected condition type " << type;
1354 }
1355}
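// A small encoding detail in the integer cases above: a compare against the constant 0 is
// emitted as `testl reg, reg` rather than `cmpl reg, $0`; the resulting flags are the same
// and the test form is shorter. 64-bit constants go through codegen_->Compare64BitValue(),
// which (as I read it) may need to materialize the immediate in a scratch register because
// cmpq only accepts a sign-extended 32-bit immediate.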
1356
1357template<class LabelType>
1358void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1359 LabelType* true_target_in,
1360 LabelType* false_target_in) {
1361 // Generated branching requires both targets to be explicit. If either of the
1362 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1363 LabelType fallthrough_target;
1364 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1365 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1366
1367 // Generate the comparison to set the CC.
1368 GenerateCompareTest(condition);
1369
1370 // Now generate the correct jump(s).
1371 Primitive::Type type = condition->InputAt(0)->GetType();
1372 switch (type) {
1373 case Primitive::kPrimLong: {
1374 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1375 break;
1376 }
1377 case Primitive::kPrimFloat: {
1378 GenerateFPJumps(condition, true_target, false_target);
1379 break;
1380 }
1381 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001382 GenerateFPJumps(condition, true_target, false_target);
1383 break;
1384 }
1385 default:
1386 LOG(FATAL) << "Unexpected condition type " << type;
1387 }
1388
David Brazdil0debae72015-11-12 18:37:00 +00001389 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001390 __ jmp(false_target);
1391 }
David Brazdil0debae72015-11-12 18:37:00 +00001392
1393 if (fallthrough_target.IsLinked()) {
1394 __ Bind(&fallthrough_target);
1395 }
Mark Mendellc4701932015-04-10 13:18:51 -04001396}
1397
David Brazdil0debae72015-11-12 18:37:00 +00001398static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1399  // Moves may clobber the EFLAGS register (zeroing a register uses xorl), so the flags are
1400  // only usable when `cond` was emitted immediately before `branch`. We also cannot reuse
1401  // the eflags of materialized FP conditions, which are generated with more complex branching.
1402 return cond->IsCondition() &&
1403 cond->GetNext() == branch &&
1404 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1405}
1406
Mark Mendell152408f2015-12-31 12:28:50 -05001407template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001408void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001409 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001410 LabelType* true_target,
1411 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001412 HInstruction* cond = instruction->InputAt(condition_input_index);
1413
1414 if (true_target == nullptr && false_target == nullptr) {
1415 // Nothing to do. The code always falls through.
1416 return;
1417 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001418 // Constant condition, statically compared against "true" (integer value 1).
1419 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001420 if (true_target != nullptr) {
1421 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001422 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001423 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001424 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001425 if (false_target != nullptr) {
1426 __ jmp(false_target);
1427 }
1428 }
1429 return;
1430 }
1431
1432 // The following code generates these patterns:
1433 // (1) true_target == nullptr && false_target != nullptr
1434 // - opposite condition true => branch to false_target
1435 // (2) true_target != nullptr && false_target == nullptr
1436 // - condition true => branch to true_target
1437 // (3) true_target != nullptr && false_target != nullptr
1438 // - condition true => branch to true_target
1439 // - branch to false_target
1440 if (IsBooleanValueOrMaterializedCondition(cond)) {
1441 if (AreEflagsSetFrom(cond, instruction)) {
1442 if (true_target == nullptr) {
1443 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1444 } else {
1445 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1446 }
1447 } else {
1448 // Materialized condition, compare against 0.
1449 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1450 if (lhs.IsRegister()) {
1451 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1452 } else {
1453 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1454 }
1455 if (true_target == nullptr) {
1456 __ j(kEqual, false_target);
1457 } else {
1458 __ j(kNotEqual, true_target);
1459 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001460 }
1461 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001462    // The condition has not been materialized; use its inputs as the comparison and
1463    // its condition kind as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001464 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001465
David Brazdil0debae72015-11-12 18:37:00 +00001466 // If this is a long or FP comparison that has been folded into
1467 // the HCondition, generate the comparison directly.
1468 Primitive::Type type = condition->InputAt(0)->GetType();
1469 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1470 GenerateCompareTestAndBranch(condition, true_target, false_target);
1471 return;
1472 }
1473
1474 Location lhs = condition->GetLocations()->InAt(0);
1475 Location rhs = condition->GetLocations()->InAt(1);
1476 if (rhs.IsRegister()) {
1477 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1478 } else if (rhs.IsConstant()) {
1479 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001480 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001481 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001482 __ cmpl(lhs.AsRegister<CpuRegister>(),
1483 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1484 }
1485 if (true_target == nullptr) {
1486 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1487 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001488 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001489 }
Dave Allison20dfc792014-06-16 20:44:29 -07001490 }
David Brazdil0debae72015-11-12 18:37:00 +00001491
1492 // If neither branch falls through (case 3), the conditional branch to `true_target`
1493 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1494 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001495 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001496 }
1497}
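// Example of pattern (1) above, as produced by VisitIf below when the true successor is the
// block that immediately follows: `true_target` is nullptr, so for a materialized condition
// held in a register only one jump is emitted (a sketch of the common case):
//
//   testl %cond, %cond
//   je    <false_successor>          // opposite condition; fall through into the true block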
1498
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001499void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001500 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1501 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001502 locations->SetInAt(0, Location::Any());
1503 }
1504}
1505
1506void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001507 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1508 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1509 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1510 nullptr : codegen_->GetLabelOf(true_successor);
1511 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1512 nullptr : codegen_->GetLabelOf(false_successor);
1513 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001514}
1515
1516void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1517 LocationSummary* locations = new (GetGraph()->GetArena())
1518 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001519 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001520 locations->SetInAt(0, Location::Any());
1521 }
1522}
1523
1524void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001525 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001526 GenerateTestAndBranch<Label>(deoptimize,
1527 /* condition_input_index */ 0,
1528 slow_path->GetEntryLabel(),
1529 /* false_target */ nullptr);
1530}
1531
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001532static bool SelectCanUseCMOV(HSelect* select) {
1533 // There are no conditional move instructions for XMMs.
1534 if (Primitive::IsFloatingPointType(select->GetType())) {
1535 return false;
1536 }
1537
1538  // An FP condition doesn't produce the single condition code that a CMOV needs.
1539 HInstruction* condition = select->GetCondition();
1540 if (condition->IsCondition() &&
1541 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1542 return false;
1543 }
1544
1545 // We can generate a CMOV for this Select.
1546 return true;
1547}
1548
David Brazdil74eb1b22015-12-14 11:44:01 +00001549void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1550 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1551 if (Primitive::IsFloatingPointType(select->GetType())) {
1552 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001553 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001554 } else {
1555 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001556 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001557 if (select->InputAt(1)->IsConstant()) {
1558 locations->SetInAt(1, Location::RequiresRegister());
1559 } else {
1560 locations->SetInAt(1, Location::Any());
1561 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001562 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001563 locations->SetInAt(1, Location::Any());
1564 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001565 }
1566 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1567 locations->SetInAt(2, Location::RequiresRegister());
1568 }
1569 locations->SetOut(Location::SameAsFirstInput());
1570}
1571
1572void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1573 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001574 if (SelectCanUseCMOV(select)) {
1575 // If both the condition and the source types are integer, we can generate
1576 // a CMOV to implement Select.
1577 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001578 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001579 DCHECK(locations->InAt(0).Equals(locations->Out()));
1580
1581 HInstruction* select_condition = select->GetCondition();
1582 Condition cond = kNotEqual;
1583
1584 // Figure out how to test the 'condition'.
1585 if (select_condition->IsCondition()) {
1586 HCondition* condition = select_condition->AsCondition();
1587 if (!condition->IsEmittedAtUseSite()) {
1588 // This was a previously materialized condition.
1589 // Can we use the existing condition code?
1590 if (AreEflagsSetFrom(condition, select)) {
1591 // Materialization was the previous instruction. Condition codes are right.
1592 cond = X86_64IntegerCondition(condition->GetCondition());
1593 } else {
1594 // No, we have to recreate the condition code.
1595 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1596 __ testl(cond_reg, cond_reg);
1597 }
1598 } else {
1599 GenerateCompareTest(condition);
1600 cond = X86_64IntegerCondition(condition->GetCondition());
1601 }
1602 } else {
1603 // Must be a boolean condition, which needs to be compared to 0.
1604 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1605 __ testl(cond_reg, cond_reg);
1606 }
1607
1608    // If the condition is true, overwrite the output, which already contains the false value.
1609    // Generate a correctly sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001610 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1611 if (value_true_loc.IsRegister()) {
1612 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1613 } else {
1614 __ cmov(cond,
1615 value_false,
1616 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1617 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001618 } else {
1619 NearLabel false_target;
1620 GenerateTestAndBranch<NearLabel>(select,
1621 /* condition_input_index */ 2,
1622 /* true_target */ nullptr,
1623 &false_target);
1624 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1625 __ Bind(&false_target);
1626 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001627}
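// Rough shape of the CMOV path above for an integer HSelect whose condition was materialized
// earlier (register assignments are illustrative only):
//
//   (RAX already holds the "false" input, since the output is SameAsFirstInput.)
//   testl   %ecx, %ecx               // the boolean condition
//   cmovnel %edx, %eax               // overwrite with the "true" input when condition != 0
//
// The 64-bit variant uses cmovneq, and a memory operand is used when the "true" input was
// allocated a stack slot.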
1628
David Srbecky0cf44932015-12-09 14:09:59 +00001629void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1630 new (GetGraph()->GetArena()) LocationSummary(info);
1631}
1632
David Srbeckyd28f4a02016-03-14 17:14:24 +00001633void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1634 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001635}
1636
1637void CodeGeneratorX86_64::GenerateNop() {
1638 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001639}
1640
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001641void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001642 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001643 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001644 // Handle the long/FP comparisons made in instruction simplification.
1645 switch (cond->InputAt(0)->GetType()) {
1646 case Primitive::kPrimLong:
1647 locations->SetInAt(0, Location::RequiresRegister());
1648 locations->SetInAt(1, Location::Any());
1649 break;
1650 case Primitive::kPrimFloat:
1651 case Primitive::kPrimDouble:
1652 locations->SetInAt(0, Location::RequiresFpuRegister());
1653 locations->SetInAt(1, Location::Any());
1654 break;
1655 default:
1656 locations->SetInAt(0, Location::RequiresRegister());
1657 locations->SetInAt(1, Location::Any());
1658 break;
1659 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001660 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001661 locations->SetOut(Location::RequiresRegister());
1662 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001663}
1664
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001665void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001666 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001667 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001668 }
Mark Mendellc4701932015-04-10 13:18:51 -04001669
1670 LocationSummary* locations = cond->GetLocations();
1671 Location lhs = locations->InAt(0);
1672 Location rhs = locations->InAt(1);
1673 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001674 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001675
1676 switch (cond->InputAt(0)->GetType()) {
1677 default:
1678 // Integer case.
1679
1680 // Clear output register: setcc only sets the low byte.
1681 __ xorl(reg, reg);
1682
1683 if (rhs.IsRegister()) {
1684 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1685 } else if (rhs.IsConstant()) {
1686 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001687 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001688 } else {
1689 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1690 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001691 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001692 return;
1693 case Primitive::kPrimLong:
1694 // Clear output register: setcc only sets the low byte.
1695 __ xorl(reg, reg);
1696
1697 if (rhs.IsRegister()) {
1698 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1699 } else if (rhs.IsConstant()) {
1700 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001701 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001702 } else {
1703 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1704 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001705 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001706 return;
1707 case Primitive::kPrimFloat: {
1708 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1709 if (rhs.IsConstant()) {
1710 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1711 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1712 } else if (rhs.IsStackSlot()) {
1713 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1714 } else {
1715 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1716 }
1717 GenerateFPJumps(cond, &true_label, &false_label);
1718 break;
1719 }
1720 case Primitive::kPrimDouble: {
1721 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1722 if (rhs.IsConstant()) {
1723 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1724 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1725 } else if (rhs.IsDoubleStackSlot()) {
1726 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1727 } else {
1728 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1729 }
1730 GenerateFPJumps(cond, &true_label, &false_label);
1731 break;
1732 }
1733 }
1734
1735 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001736 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001737
Roland Levillain4fa13f62015-07-06 18:11:54 +01001738 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001739 __ Bind(&false_label);
1740 __ xorl(reg, reg);
1741 __ jmp(&done_label);
1742
Roland Levillain4fa13f62015-07-06 18:11:54 +01001743 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001744 __ Bind(&true_label);
1745 __ movl(reg, Immediate(1));
1746 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001747}
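// Materialization sketch for the integer case above, e.g. `x < y` with both operands in
// registers (illustrative):
//
//   xorl %out, %out                  // clear first: setcc only writes the low byte
//   cmpl %y, %x
//   setl %out
//
// FP conditions cannot be materialized with a single setcc, mainly because of the unordered
// (NaN) case, so they use the jump-based sequence that stores 0 or 1 explicitly.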
1748
1749void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001750 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001751}
1752
1753void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001754 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001755}
1756
1757void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001758 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001759}
1760
1761void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001762 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001763}
1764
1765void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001766 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001767}
1768
1769void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001770 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001771}
1772
1773void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001774 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001775}
1776
1777void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001778 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001779}
1780
1781void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001782 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001783}
1784
1785void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001786 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001787}
1788
1789void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001790 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001791}
1792
1793void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001794 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001795}
1796
Aart Bike9f37602015-10-09 11:15:55 -07001797void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001798 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001799}
1800
1801void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001802 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001803}
1804
1805void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001806 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001807}
1808
1809void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001810 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001811}
1812
1813void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001814 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001815}
1816
1817void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001818 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001819}
1820
1821void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001822 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001823}
1824
1825void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001826 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001827}
1828
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001829void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001830 LocationSummary* locations =
1831 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001832 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001833 case Primitive::kPrimBoolean:
1834 case Primitive::kPrimByte:
1835 case Primitive::kPrimShort:
1836 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001837 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001838 case Primitive::kPrimLong: {
1839 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001840 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001841 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1842 break;
1843 }
1844 case Primitive::kPrimFloat:
1845 case Primitive::kPrimDouble: {
1846 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001847 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001848 locations->SetOut(Location::RequiresRegister());
1849 break;
1850 }
1851 default:
1852 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1853 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001854}
1855
1856void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001857 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001858 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001859 Location left = locations->InAt(0);
1860 Location right = locations->InAt(1);
1861
Mark Mendell0c9497d2015-08-21 09:30:05 -04001862 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001863 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001864 Condition less_cond = kLess;
1865
Calin Juravleddb7df22014-11-25 20:56:51 +00001866 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001867 case Primitive::kPrimBoolean:
1868 case Primitive::kPrimByte:
1869 case Primitive::kPrimShort:
1870 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001871 case Primitive::kPrimInt: {
1872 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1873 if (right.IsConstant()) {
1874 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1875 codegen_->Compare32BitValue(left_reg, value);
1876 } else if (right.IsStackSlot()) {
1877 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1878 } else {
1879 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1880 }
1881 break;
1882 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001883 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001884 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1885 if (right.IsConstant()) {
1886 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001887 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001888 } else if (right.IsDoubleStackSlot()) {
1889 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001890 } else {
1891 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1892 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001893 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001894 }
1895 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001896 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1897 if (right.IsConstant()) {
1898 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1899 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1900 } else if (right.IsStackSlot()) {
1901 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1902 } else {
1903 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1904 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001905 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001906 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001907 break;
1908 }
1909 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001910 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1911 if (right.IsConstant()) {
1912 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1913 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1914 } else if (right.IsDoubleStackSlot()) {
1915 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1916 } else {
1917 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1918 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001919 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001920 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001921 break;
1922 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001923 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001924 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001925 }
Aart Bika19616e2016-02-01 18:57:58 -08001926
Calin Juravleddb7df22014-11-25 20:56:51 +00001927 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001928 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001929 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001930
Calin Juravle91debbc2014-11-26 19:01:09 +00001931 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001932 __ movl(out, Immediate(1));
1933 __ jmp(&done);
1934
1935 __ Bind(&less);
1936 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001937
1938 __ Bind(&done);
1939}
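// Protocol implemented above: HCompare leaves -1, 0 or 1 in `out` for less, equal and greater.
// For floating point the unordered case is routed by the bias, so a gt-biased compare yields 1
// when either operand is NaN while an lt-biased one yields -1. Using kBelow as `less_cond`
// works because ucomis{s,d} report "less than" through CF.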
1940
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001941void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001942 LocationSummary* locations =
1943 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001944 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001945}
1946
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001947void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001948 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001949}
1950
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001951void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1952 LocationSummary* locations =
1953 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1954 locations->SetOut(Location::ConstantLocation(constant));
1955}
1956
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001957void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001958 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001959}
1960
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001961void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001962 LocationSummary* locations =
1963 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001964 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001965}
1966
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001967void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001968 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001969}
1970
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001971void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
1972 LocationSummary* locations =
1973 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1974 locations->SetOut(Location::ConstantLocation(constant));
1975}
1976
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001977void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001978 // Will be generated at use site.
1979}
1980
1981void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
1982 LocationSummary* locations =
1983 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1984 locations->SetOut(Location::ConstantLocation(constant));
1985}
1986
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001987void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
1988 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001989 // Will be generated at use site.
1990}
1991
Calin Juravle27df7582015-04-17 19:12:31 +01001992void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
1993 memory_barrier->SetLocations(nullptr);
1994}
1995
1996void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001997 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01001998}
1999
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002000void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2001 ret->SetLocations(nullptr);
2002}
2003
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002004void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002005 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002006}
2007
2008void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002009 LocationSummary* locations =
2010 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002011 switch (ret->InputAt(0)->GetType()) {
2012 case Primitive::kPrimBoolean:
2013 case Primitive::kPrimByte:
2014 case Primitive::kPrimChar:
2015 case Primitive::kPrimShort:
2016 case Primitive::kPrimInt:
2017 case Primitive::kPrimNot:
2018 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002019 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002020 break;
2021
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002022 case Primitive::kPrimFloat:
2023 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002024 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002025 break;
2026
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002027 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002028 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002029 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002030}
2031
2032void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2033 if (kIsDebugBuild) {
2034 switch (ret->InputAt(0)->GetType()) {
2035 case Primitive::kPrimBoolean:
2036 case Primitive::kPrimByte:
2037 case Primitive::kPrimChar:
2038 case Primitive::kPrimShort:
2039 case Primitive::kPrimInt:
2040 case Primitive::kPrimNot:
2041 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002042 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002043 break;
2044
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002045 case Primitive::kPrimFloat:
2046 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002047 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002048 XMM0);
2049 break;
2050
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002051 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002052 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002053 }
2054 }
2055 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002056}
2057
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002058Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2059 switch (type) {
2060 case Primitive::kPrimBoolean:
2061 case Primitive::kPrimByte:
2062 case Primitive::kPrimChar:
2063 case Primitive::kPrimShort:
2064 case Primitive::kPrimInt:
2065 case Primitive::kPrimNot:
2066 case Primitive::kPrimLong:
2067 return Location::RegisterLocation(RAX);
2068
2069 case Primitive::kPrimVoid:
2070 return Location::NoLocation();
2071
2072 case Primitive::kPrimDouble:
2073 case Primitive::kPrimFloat:
2074 return Location::FpuRegisterLocation(XMM0);
2075 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002076
2077 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002078}
2079
2080Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2081 return Location::RegisterLocation(kMethodRegisterArgument);
2082}
2083
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002084Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002085 switch (type) {
2086 case Primitive::kPrimBoolean:
2087 case Primitive::kPrimByte:
2088 case Primitive::kPrimChar:
2089 case Primitive::kPrimShort:
2090 case Primitive::kPrimInt:
2091 case Primitive::kPrimNot: {
2092 uint32_t index = gp_index_++;
2093 stack_index_++;
2094 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002095 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002096 } else {
2097 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2098 }
2099 }
2100
2101 case Primitive::kPrimLong: {
2102 uint32_t index = gp_index_;
2103 stack_index_ += 2;
2104 if (index < calling_convention.GetNumberOfRegisters()) {
2105 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002106 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002107 } else {
2108 gp_index_ += 2;
2109 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2110 }
2111 }
2112
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002113 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002114 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002115 stack_index_++;
2116 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002117 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002118 } else {
2119 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2120 }
2121 }
2122
2123 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002124 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002125 stack_index_ += 2;
2126 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002127 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002128 } else {
2129 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2130 }
2131 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002132
2133 case Primitive::kPrimVoid:
2134 LOG(FATAL) << "Unexpected parameter type " << type;
2135 break;
2136 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002137 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002138}
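
// Illustrative sketch only: for a managed signature such as
//   foo(int a, long b, float c, double d, Object e)
// the visitor above hands out the first free core register to a, the next core
// register to b, the first two FP registers to c and d, and the next core
// register to e, falling back to stack slots once a pool is exhausted (with
// the usual ART x86-64 pools this would be RSI, RDX, XMM0, XMM1, RCX -- treat
// the exact names as an assumption of this comment, not a guarantee).
// Core and FP arguments consume independent register pools, while stack_index_
// advances by one vreg slot (two for long/double) for every argument so that
// overflow arguments end up at the expected stack offset.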
2139
Calin Juravle175dc732015-08-25 15:42:32 +01002140void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2141 // The trampoline uses the same calling convention as dex calling conventions,
2142 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
2143 // the method_idx.
2144 HandleInvoke(invoke);
2145}
2146
2147void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2148 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2149}
2150
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002151void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002152 // Explicit clinit checks triggered by static invokes must have been pruned by
2153 // art::PrepareForRegisterAllocation.
2154 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002155
Mark Mendellfb8d2792015-03-31 22:16:59 -04002156 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002157 if (intrinsic.TryDispatch(invoke)) {
2158 return;
2159 }
2160
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002161 HandleInvoke(invoke);
2162}
2163
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002164static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2165 if (invoke->GetLocations()->Intrinsified()) {
2166 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2167 intrinsic.Dispatch(invoke);
2168 return true;
2169 }
2170 return false;
2171}
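
// Sketch of the intrinsic fast path used by the invoke visitors in this file:
// the LocationsBuilder lets IntrinsicLocationsBuilderX86_64 claim the invoke
// first; if it did (Intrinsified() is true), the helper above dispatches to
// IntrinsicCodeGeneratorX86_64, the intrinsic is expanded inline, and the
// regular call sequence (including its RecordPcInfo) is skipped entirely.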
2172
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002173void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002174 // Explicit clinit checks triggered by static invokes must have been pruned by
2175 // art::PrepareForRegisterAllocation.
2176 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002177
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002178 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2179 return;
2180 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002181
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002182 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002183 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002184 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002185 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002186}
2187
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002188void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002189 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002190 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002191}
2192
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002193void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002194 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002195 if (intrinsic.TryDispatch(invoke)) {
2196 return;
2197 }
2198
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002199 HandleInvoke(invoke);
2200}
2201
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002202void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002203 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2204 return;
2205 }
2206
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002207 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002208 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002209 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002210}
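
// GenerateVirtualCall roughly performs: load the receiver's class, load the
// ArtMethod* from that class' vtable at the method's vtable index into the
// temp reserved by HandleInvoke, then call its entry point. This is a sketch
// of the expected sequence, not a verbatim copy of that helper.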
2211
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002212void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2213 HandleInvoke(invoke);
2214 // Add the hidden argument.
2215 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2216}
2217
2218void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2219 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002220 LocationSummary* locations = invoke->GetLocations();
2221 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2222 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Mathieu Chartiere401d142015-04-22 13:56:20 -07002223 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
2224 invoke->GetImtIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002225 Location receiver = locations->InAt(0);
2226 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2227
Roland Levillain0d5a2812015-11-13 10:07:31 +00002228 // Set the hidden argument. It is safe to do this here, as RAX
2229 // is not modified again before the `call` instruction.
2230 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002231 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002232
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002233 if (receiver.IsStackSlot()) {
2234 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002235 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002236 __ movl(temp, Address(temp, class_offset));
2237 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002238 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002239 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002240 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002241 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002242 // Instead of simply (possibly) unpoisoning `temp` here, we should
2243 // emit a read barrier for the previous class reference load.
2244 // However this is not required in practice, as this is an
2245 // intermediate/temporary reference and because the current
2246 // concurrent copying collector keeps the from-space memory
2247 // intact/accessible until the end of the marking phase (the
2248 // concurrent copying collector may not keep it accessible in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002249 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002250 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002251 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002252 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002253 __ call(Address(temp,
2254 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002255
2256 DCHECK(!codegen_->IsLeafMethod());
2257 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2258}
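
// Rough shape of the sequence emitted above (register names depend on what
// the allocator picked for temp; RAX is fixed for the hidden argument):
//   mov  RAX, <dex method index>           // hidden argument
//   movl temp, [receiver + class_offset]   // /* HeapReference<Class> */
//   (maybe unpoison temp)
//   movq temp, [temp + imt_entry_offset]   // ArtMethod* from the embedded IMT
//   call [temp + entry_point_offset]
// The hidden argument lets the conflict trampoline disambiguate interface
// methods that end up sharing an IMT slot (GetImtIndex() % kImtSize above).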
2259
Roland Levillain88cb1752014-10-20 16:36:47 +01002260void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2261 LocationSummary* locations =
2262 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2263 switch (neg->GetResultType()) {
2264 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002265 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002266 locations->SetInAt(0, Location::RequiresRegister());
2267 locations->SetOut(Location::SameAsFirstInput());
2268 break;
2269
Roland Levillain88cb1752014-10-20 16:36:47 +01002270 case Primitive::kPrimFloat:
2271 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002272 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002273 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002274 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002275 break;
2276
2277 default:
2278 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2279 }
2280}
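
// For the FP cases the extra temporary holds the sign-bit mask loaded from the
// constant area; since xorps/xorpd flip the sign bit of the input in place,
// the output is constrained to be the same as the first input.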
2281
2282void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2283 LocationSummary* locations = neg->GetLocations();
2284 Location out = locations->Out();
2285 Location in = locations->InAt(0);
2286 switch (neg->GetResultType()) {
2287 case Primitive::kPrimInt:
2288 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002289 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002290 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002291 break;
2292
2293 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002294 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002295 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002296 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002297 break;
2298
Roland Levillain5368c212014-11-27 15:03:41 +00002299 case Primitive::kPrimFloat: {
2300 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002301 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002302 // Implement float negation with an exclusive or with value
2303 // 0x80000000 (mask for bit 31, representing the sign of a
2304 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002305 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002306 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002307 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002308 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002309
Roland Levillain5368c212014-11-27 15:03:41 +00002310 case Primitive::kPrimDouble: {
2311 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002312 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002313 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002314 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002315 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002316 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002317 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002318 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002319 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002320
2321 default:
2322 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2323 }
2324}
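
// Worked example of the xorps trick above: 1.0f is 0x3F800000, and xoring it
// with the 0x80000000 mask yields 0xBF800000, i.e. -1.0f. Unlike subtracting
// from +0.0, this also negates zeros correctly (+0.0f -> -0.0f) and cannot
// raise floating-point exceptions.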
2325
Roland Levillaindff1f282014-11-05 14:15:05 +00002326void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2327 LocationSummary* locations =
2328 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2329 Primitive::Type result_type = conversion->GetResultType();
2330 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002331 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002332
David Brazdilb2bd1c52015-03-25 11:17:37 +00002333 // The Java language does not allow treating boolean as an integral type but
2334 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002335
Roland Levillaindff1f282014-11-05 14:15:05 +00002336 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002337 case Primitive::kPrimByte:
2338 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002339 case Primitive::kPrimLong:
2340 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002341 case Primitive::kPrimBoolean:
2342 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002343 case Primitive::kPrimShort:
2344 case Primitive::kPrimInt:
2345 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002346 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002347 locations->SetInAt(0, Location::Any());
2348 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2349 break;
2350
2351 default:
2352 LOG(FATAL) << "Unexpected type conversion from " << input_type
2353 << " to " << result_type;
2354 }
2355 break;
2356
Roland Levillain01a8d712014-11-14 16:27:39 +00002357 case Primitive::kPrimShort:
2358 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002359 case Primitive::kPrimLong:
2360 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002361 case Primitive::kPrimBoolean:
2362 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002363 case Primitive::kPrimByte:
2364 case Primitive::kPrimInt:
2365 case Primitive::kPrimChar:
2366 // Processing a Dex `int-to-short' instruction.
2367 locations->SetInAt(0, Location::Any());
2368 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2369 break;
2370
2371 default:
2372 LOG(FATAL) << "Unexpected type conversion from " << input_type
2373 << " to " << result_type;
2374 }
2375 break;
2376
Roland Levillain946e1432014-11-11 17:35:19 +00002377 case Primitive::kPrimInt:
2378 switch (input_type) {
2379 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002380 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002381 locations->SetInAt(0, Location::Any());
2382 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2383 break;
2384
2385 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002386 // Processing a Dex `float-to-int' instruction.
2387 locations->SetInAt(0, Location::RequiresFpuRegister());
2388 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002389 break;
2390
Roland Levillain946e1432014-11-11 17:35:19 +00002391 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002392 // Processing a Dex `double-to-int' instruction.
2393 locations->SetInAt(0, Location::RequiresFpuRegister());
2394 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002395 break;
2396
2397 default:
2398 LOG(FATAL) << "Unexpected type conversion from " << input_type
2399 << " to " << result_type;
2400 }
2401 break;
2402
Roland Levillaindff1f282014-11-05 14:15:05 +00002403 case Primitive::kPrimLong:
2404 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002405 case Primitive::kPrimBoolean:
2406 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002407 case Primitive::kPrimByte:
2408 case Primitive::kPrimShort:
2409 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002410 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002411 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002412 // TODO: We would benefit from a (to-be-implemented)
2413 // Location::RegisterOrStackSlot requirement for this input.
2414 locations->SetInAt(0, Location::RequiresRegister());
2415 locations->SetOut(Location::RequiresRegister());
2416 break;
2417
2418 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002419 // Processing a Dex `float-to-long' instruction.
2420 locations->SetInAt(0, Location::RequiresFpuRegister());
2421 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002422 break;
2423
Roland Levillaindff1f282014-11-05 14:15:05 +00002424 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002425 // Processing a Dex `double-to-long' instruction.
2426 locations->SetInAt(0, Location::RequiresFpuRegister());
2427 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002428 break;
2429
2430 default:
2431 LOG(FATAL) << "Unexpected type conversion from " << input_type
2432 << " to " << result_type;
2433 }
2434 break;
2435
Roland Levillain981e4542014-11-14 11:47:14 +00002436 case Primitive::kPrimChar:
2437 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002438 case Primitive::kPrimLong:
2439 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002440 case Primitive::kPrimBoolean:
2441 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002442 case Primitive::kPrimByte:
2443 case Primitive::kPrimShort:
2444 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002445 // Processing a Dex `int-to-char' instruction.
2446 locations->SetInAt(0, Location::Any());
2447 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2448 break;
2449
2450 default:
2451 LOG(FATAL) << "Unexpected type conversion from " << input_type
2452 << " to " << result_type;
2453 }
2454 break;
2455
Roland Levillaindff1f282014-11-05 14:15:05 +00002456 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002457 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002458 case Primitive::kPrimBoolean:
2459 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002460 case Primitive::kPrimByte:
2461 case Primitive::kPrimShort:
2462 case Primitive::kPrimInt:
2463 case Primitive::kPrimChar:
2464 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002465 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002466 locations->SetOut(Location::RequiresFpuRegister());
2467 break;
2468
2469 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002470 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002471 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002472 locations->SetOut(Location::RequiresFpuRegister());
2473 break;
2474
Roland Levillaincff13742014-11-17 14:32:17 +00002475 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002476 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002477 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002478 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002479 break;
2480
2481 default:
2482 LOG(FATAL) << "Unexpected type conversion from " << input_type
2483 << " to " << result_type;
2484 }
2485 break;
2486
Roland Levillaindff1f282014-11-05 14:15:05 +00002487 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002488 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002489 case Primitive::kPrimBoolean:
2490 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002491 case Primitive::kPrimByte:
2492 case Primitive::kPrimShort:
2493 case Primitive::kPrimInt:
2494 case Primitive::kPrimChar:
2495 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002496 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002497 locations->SetOut(Location::RequiresFpuRegister());
2498 break;
2499
2500 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002501 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002502 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002503 locations->SetOut(Location::RequiresFpuRegister());
2504 break;
2505
Roland Levillaincff13742014-11-17 14:32:17 +00002506 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002507 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002508 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002509 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002510 break;
2511
2512 default:
2513 LOG(FATAL) << "Unexpected type conversion from " << input_type
2514 << " to " << result_type;
2515 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002516 break;
2517
2518 default:
2519 LOG(FATAL) << "Unexpected type conversion from " << input_type
2520 << " to " << result_type;
2521 }
2522}
2523
2524void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2525 LocationSummary* locations = conversion->GetLocations();
2526 Location out = locations->Out();
2527 Location in = locations->InAt(0);
2528 Primitive::Type result_type = conversion->GetResultType();
2529 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002530 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002531 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002532 case Primitive::kPrimByte:
2533 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002534 case Primitive::kPrimLong:
2535 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002536 case Primitive::kPrimBoolean:
2537 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002538 case Primitive::kPrimShort:
2539 case Primitive::kPrimInt:
2540 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002541 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002542 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002543 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002544 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002545 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002546 Address(CpuRegister(RSP), in.GetStackIndex()));
2547 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002548 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002549 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002550 }
2551 break;
2552
2553 default:
2554 LOG(FATAL) << "Unexpected type conversion from " << input_type
2555 << " to " << result_type;
2556 }
2557 break;
2558
Roland Levillain01a8d712014-11-14 16:27:39 +00002559 case Primitive::kPrimShort:
2560 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002561 case Primitive::kPrimLong:
2562 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002563 case Primitive::kPrimBoolean:
2564 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002565 case Primitive::kPrimByte:
2566 case Primitive::kPrimInt:
2567 case Primitive::kPrimChar:
2568 // Processing a Dex `int-to-short' instruction.
2569 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002570 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002571 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002572 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002573 Address(CpuRegister(RSP), in.GetStackIndex()));
2574 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002575 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002576 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002577 }
2578 break;
2579
2580 default:
2581 LOG(FATAL) << "Unexpected type conversion from " << input_type
2582 << " to " << result_type;
2583 }
2584 break;
2585
Roland Levillain946e1432014-11-11 17:35:19 +00002586 case Primitive::kPrimInt:
2587 switch (input_type) {
2588 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002589 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002590 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002591 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002592 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002593 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002594 Address(CpuRegister(RSP), in.GetStackIndex()));
2595 } else {
2596 DCHECK(in.IsConstant());
2597 DCHECK(in.GetConstant()->IsLongConstant());
2598 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002599 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002600 }
2601 break;
2602
Roland Levillain3f8f9362014-12-02 17:45:01 +00002603 case Primitive::kPrimFloat: {
2604 // Processing a Dex `float-to-int' instruction.
2605 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2606 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002607 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002608
2609 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002610 // if input >= (float)INT_MAX goto done
2611 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002612 __ j(kAboveEqual, &done);
2613 // if input == NaN goto nan
2614 __ j(kUnordered, &nan);
2615 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002616 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002617 __ jmp(&done);
2618 __ Bind(&nan);
2619 // output = 0
2620 __ xorl(output, output);
2621 __ Bind(&done);
2622 break;
2623 }
2624
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002625 case Primitive::kPrimDouble: {
2626 // Processing a Dex `double-to-int' instruction.
2627 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2628 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002629 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002630
2631 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002632 // if input >= (double)INT_MAX goto done
2633 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002634 __ j(kAboveEqual, &done);
2635 // if input == NaN goto nan
2636 __ j(kUnordered, &nan);
2637 // output = double-to-int-truncate(input)
2638 __ cvttsd2si(output, input);
2639 __ jmp(&done);
2640 __ Bind(&nan);
2641 // output = 0
2642 __ xorl(output, output);
2643 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002644 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002645 }
Roland Levillain946e1432014-11-11 17:35:19 +00002646
2647 default:
2648 LOG(FATAL) << "Unexpected type conversion from " << input_type
2649 << " to " << result_type;
2650 }
2651 break;
2652
Roland Levillaindff1f282014-11-05 14:15:05 +00002653 case Primitive::kPrimLong:
2654 DCHECK(out.IsRegister());
2655 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002656 case Primitive::kPrimBoolean:
2657 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002658 case Primitive::kPrimByte:
2659 case Primitive::kPrimShort:
2660 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002661 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002662 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002663 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002664 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002665 break;
2666
Roland Levillain624279f2014-12-04 11:54:28 +00002667 case Primitive::kPrimFloat: {
2668 // Processing a Dex `float-to-long' instruction.
2669 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2670 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002671 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002672
Mark Mendell92e83bf2015-05-07 11:25:03 -04002673 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002674 // if input >= (float)LONG_MAX goto done
2675 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002676 __ j(kAboveEqual, &done);
2677 // if input == NaN goto nan
2678 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002679 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002680 __ cvttss2si(output, input, true);
2681 __ jmp(&done);
2682 __ Bind(&nan);
2683 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002684 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002685 __ Bind(&done);
2686 break;
2687 }
2688
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002689 case Primitive::kPrimDouble: {
2690 // Processing a Dex `double-to-long' instruction.
2691 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2692 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002693 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002694
Mark Mendell92e83bf2015-05-07 11:25:03 -04002695 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002696 // if input >= (double)LONG_MAX goto done
2697 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002698 __ j(kAboveEqual, &done);
2699 // if input == NaN goto nan
2700 __ j(kUnordered, &nan);
2701 // output = double-to-long-truncate(input)
2702 __ cvttsd2si(output, input, true);
2703 __ jmp(&done);
2704 __ Bind(&nan);
2705 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002706 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002707 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002708 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002709 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002710
2711 default:
2712 LOG(FATAL) << "Unexpected type conversion from " << input_type
2713 << " to " << result_type;
2714 }
2715 break;
2716
Roland Levillain981e4542014-11-14 11:47:14 +00002717 case Primitive::kPrimChar:
2718 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002719 case Primitive::kPrimLong:
2720 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002721 case Primitive::kPrimBoolean:
2722 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002723 case Primitive::kPrimByte:
2724 case Primitive::kPrimShort:
2725 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002726 // Processing a Dex `int-to-char' instruction.
2727 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002728 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002729 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002730 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002731 Address(CpuRegister(RSP), in.GetStackIndex()));
2732 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002733 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002734 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002735 }
2736 break;
2737
2738 default:
2739 LOG(FATAL) << "Unexpected type conversion from " << input_type
2740 << " to " << result_type;
2741 }
2742 break;
2743
Roland Levillaindff1f282014-11-05 14:15:05 +00002744 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002745 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002746 case Primitive::kPrimBoolean:
2747 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002748 case Primitive::kPrimByte:
2749 case Primitive::kPrimShort:
2750 case Primitive::kPrimInt:
2751 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002752 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002753 if (in.IsRegister()) {
2754 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2755 } else if (in.IsConstant()) {
2756 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2757 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002758 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002759 } else {
2760 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2761 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2762 }
Roland Levillaincff13742014-11-17 14:32:17 +00002763 break;
2764
2765 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002766 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002767 if (in.IsRegister()) {
2768 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2769 } else if (in.IsConstant()) {
2770 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2771 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002772 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002773 } else {
2774 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2775 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2776 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002777 break;
2778
Roland Levillaincff13742014-11-17 14:32:17 +00002779 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002780 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002781 if (in.IsFpuRegister()) {
2782 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2783 } else if (in.IsConstant()) {
2784 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2785 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002786 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002787 } else {
2788 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2789 Address(CpuRegister(RSP), in.GetStackIndex()));
2790 }
Roland Levillaincff13742014-11-17 14:32:17 +00002791 break;
2792
2793 default:
2794 LOG(FATAL) << "Unexpected type conversion from " << input_type
2795 << " to " << result_type;
2796 }
2797 break;
2798
Roland Levillaindff1f282014-11-05 14:15:05 +00002799 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002800 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002801 case Primitive::kPrimBoolean:
2802 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002803 case Primitive::kPrimByte:
2804 case Primitive::kPrimShort:
2805 case Primitive::kPrimInt:
2806 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002807 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002808 if (in.IsRegister()) {
2809 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2810 } else if (in.IsConstant()) {
2811 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2812 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002813 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002814 } else {
2815 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2816 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2817 }
Roland Levillaincff13742014-11-17 14:32:17 +00002818 break;
2819
2820 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002821 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002822 if (in.IsRegister()) {
2823 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2824 } else if (in.IsConstant()) {
2825 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2826 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002827 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002828 } else {
2829 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2830 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2831 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002832 break;
2833
Roland Levillaincff13742014-11-17 14:32:17 +00002834 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002835 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002836 if (in.IsFpuRegister()) {
2837 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2838 } else if (in.IsConstant()) {
2839 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2840 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002841 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002842 } else {
2843 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2844 Address(CpuRegister(RSP), in.GetStackIndex()));
2845 }
Roland Levillaincff13742014-11-17 14:32:17 +00002846 break;
2847
2848 default:
2849 LOG(FATAL) << "Unexpected type conversion from " << input_type
2850 << " to " << result_type;
2851 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002852 break;
2853
2854 default:
2855 LOG(FATAL) << "Unexpected type conversion from " << input_type
2856 << " to " << result_type;
2857 }
2858}
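
// The float/double -> int/long sequences above implement the Java saturating
// conversion semantics: values at or above the maximum take the explicit
// kAboveEqual branch and keep kPrimIntMax/kPrimLongMax, NaN takes the
// kUnordered branch and yields 0, and everything else goes through
// cvttss2si/cvttsd2si, whose "integer indefinite" result (0x80000000, or its
// 64-bit equivalent) already equals the minimum value for inputs that are too
// small.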
2859
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002860void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002861 LocationSummary* locations =
2862 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002863 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002864 case Primitive::kPrimInt: {
2865 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002866 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2867 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002868 break;
2869 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002870
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002871 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002872 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002873 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002874 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002875 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002876 break;
2877 }
2878
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002879 case Primitive::kPrimDouble:
2880 case Primitive::kPrimFloat: {
2881 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002882 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002883 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002884 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002885 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002886
2887 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002888 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002889 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002890}
2891
2892void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2893 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002894 Location first = locations->InAt(0);
2895 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002896 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002897
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002898 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002899 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002900 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002901 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2902 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002903 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2904 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002905 } else {
2906 __ leal(out.AsRegister<CpuRegister>(), Address(
2907 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2908 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002909 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002910 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2911 __ addl(out.AsRegister<CpuRegister>(),
2912 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2913 } else {
2914 __ leal(out.AsRegister<CpuRegister>(), Address(
2915 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2916 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002917 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002918 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002919 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002920 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002921 break;
2922 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002923
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002924 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002925 if (second.IsRegister()) {
2926 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2927 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002928 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2929 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002930 } else {
2931 __ leaq(out.AsRegister<CpuRegister>(), Address(
2932 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2933 }
2934 } else {
2935 DCHECK(second.IsConstant());
2936 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2937 int32_t int32_value = Low32Bits(value);
2938 DCHECK_EQ(int32_value, value);
2939 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2940 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2941 } else {
2942 __ leaq(out.AsRegister<CpuRegister>(), Address(
2943 first.AsRegister<CpuRegister>(), int32_value));
2944 }
2945 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002946 break;
2947 }
2948
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002949 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002950 if (second.IsFpuRegister()) {
2951 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2952 } else if (second.IsConstant()) {
2953 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002954 codegen_->LiteralFloatAddress(
2955 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002956 } else {
2957 DCHECK(second.IsStackSlot());
2958 __ addss(first.AsFpuRegister<XmmRegister>(),
2959 Address(CpuRegister(RSP), second.GetStackIndex()));
2960 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002961 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002962 }
2963
2964 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002965 if (second.IsFpuRegister()) {
2966 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2967 } else if (second.IsConstant()) {
2968 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002969 codegen_->LiteralDoubleAddress(
2970 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002971 } else {
2972 DCHECK(second.IsDoubleStackSlot());
2973 __ addsd(first.AsFpuRegister<XmmRegister>(),
2974 Address(CpuRegister(RSP), second.GetStackIndex()));
2975 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002976 break;
2977 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002978
2979 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002980 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002981 }
2982}
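
// When the output register differs from both inputs, leal/leaq acts as a
// three-operand, flags-free add (dst = src1 + src2 or dst = src1 + imm);
// plain addl/addq is kept for the two-operand cases, where it is typically the
// shorter encoding.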
2983
2984void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002985 LocationSummary* locations =
2986 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002987 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002988 case Primitive::kPrimInt: {
2989 locations->SetInAt(0, Location::RequiresRegister());
2990 locations->SetInAt(1, Location::Any());
2991 locations->SetOut(Location::SameAsFirstInput());
2992 break;
2993 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002994 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002995 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04002996 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002997 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002998 break;
2999 }
Calin Juravle11351682014-10-23 15:38:15 +01003000 case Primitive::kPrimFloat:
3001 case Primitive::kPrimDouble: {
3002 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003003 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003004 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003005 break;
Calin Juravle11351682014-10-23 15:38:15 +01003006 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003007 default:
Calin Juravle11351682014-10-23 15:38:15 +01003008 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003009 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003010}
3011
3012void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3013 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003014 Location first = locations->InAt(0);
3015 Location second = locations->InAt(1);
3016 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003017 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003018 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003019 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003020 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003021 } else if (second.IsConstant()) {
3022 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003023 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003024 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003025 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003026 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003027 break;
3028 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003029 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003030 if (second.IsConstant()) {
3031 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3032 DCHECK(IsInt<32>(value));
3033 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3034 } else {
3035 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3036 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003037 break;
3038 }
3039
Calin Juravle11351682014-10-23 15:38:15 +01003040 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003041 if (second.IsFpuRegister()) {
3042 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3043 } else if (second.IsConstant()) {
3044 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003045 codegen_->LiteralFloatAddress(
3046 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003047 } else {
3048 DCHECK(second.IsStackSlot());
3049 __ subss(first.AsFpuRegister<XmmRegister>(),
3050 Address(CpuRegister(RSP), second.GetStackIndex()));
3051 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003052 break;
Calin Juravle11351682014-10-23 15:38:15 +01003053 }
3054
3055 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003056 if (second.IsFpuRegister()) {
3057 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3058 } else if (second.IsConstant()) {
3059 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003060 codegen_->LiteralDoubleAddress(
3061 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003062 } else {
3063 DCHECK(second.IsDoubleStackSlot());
3064 __ subsd(first.AsFpuRegister<XmmRegister>(),
3065 Address(CpuRegister(RSP), second.GetStackIndex()));
3066 }
Calin Juravle11351682014-10-23 15:38:15 +01003067 break;
3068 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003069
3070 default:
Calin Juravle11351682014-10-23 15:38:15 +01003071 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003072 }
3073}
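
// Subtraction has no lea-style three-operand form here, so the locations above
// pin the output to the first input and the code always uses the two-operand
// subl/subq (with an immediate, a register or a stack operand as the second
// source).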
3074
Calin Juravle34bacdf2014-10-07 20:23:36 +01003075void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3076 LocationSummary* locations =
3077 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3078 switch (mul->GetResultType()) {
3079 case Primitive::kPrimInt: {
3080 locations->SetInAt(0, Location::RequiresRegister());
3081 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003082 if (mul->InputAt(1)->IsIntConstant()) {
3083 // Can use 3 operand multiply.
3084 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3085 } else {
3086 locations->SetOut(Location::SameAsFirstInput());
3087 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003088 break;
3089 }
3090 case Primitive::kPrimLong: {
3091 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003092 locations->SetInAt(1, Location::Any());
3093 if (mul->InputAt(1)->IsLongConstant() &&
3094 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003095 // Can use 3 operand multiply.
3096 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3097 } else {
3098 locations->SetOut(Location::SameAsFirstInput());
3099 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003100 break;
3101 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003102 case Primitive::kPrimFloat:
3103 case Primitive::kPrimDouble: {
3104 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003105 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003106 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003107 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003108 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003109
3110 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003111 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003112 }
3113}
3114
3115void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3116 LocationSummary* locations = mul->GetLocations();
3117 Location first = locations->InAt(0);
3118 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003119 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003120 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003121 case Primitive::kPrimInt:
3122 // The constant may have ended up in a register, so test explicitly to avoid
3123 // problems where the output may not be the same as the first operand.
3124 if (mul->InputAt(1)->IsIntConstant()) {
3125 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3126 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3127 } else if (second.IsRegister()) {
3128 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003129 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003130 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003131 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003132 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003133 __ imull(first.AsRegister<CpuRegister>(),
3134 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003135 }
3136 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003137 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003138 // The constant may have ended up in a register, so test explicitly to avoid
3139 // problems where the output may not be the same as the first operand.
3140 if (mul->InputAt(1)->IsLongConstant()) {
3141 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3142 if (IsInt<32>(value)) {
3143 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3144 Immediate(static_cast<int32_t>(value)));
3145 } else {
3146 // Have to use the constant area.
3147 DCHECK(first.Equals(out));
3148 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3149 }
3150 } else if (second.IsRegister()) {
3151 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003152 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003153 } else {
3154 DCHECK(second.IsDoubleStackSlot());
3155 DCHECK(first.Equals(out));
3156 __ imulq(first.AsRegister<CpuRegister>(),
3157 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003158 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003159 break;
3160 }
3161
Calin Juravleb5bfa962014-10-21 18:02:24 +01003162 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003163 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003164 if (second.IsFpuRegister()) {
3165 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3166 } else if (second.IsConstant()) {
3167 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003168 codegen_->LiteralFloatAddress(
3169 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003170 } else {
3171 DCHECK(second.IsStackSlot());
3172 __ mulss(first.AsFpuRegister<XmmRegister>(),
3173 Address(CpuRegister(RSP), second.GetStackIndex()));
3174 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003175 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003176 }
3177
3178 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003179 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003180 if (second.IsFpuRegister()) {
3181 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3182 } else if (second.IsConstant()) {
3183 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003184 codegen_->LiteralDoubleAddress(
3185 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003186 } else {
3187 DCHECK(second.IsDoubleStackSlot());
3188 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3189 Address(CpuRegister(RSP), second.GetStackIndex()));
3190 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003191 break;
3192 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003193
3194 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003195 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003196 }
3197}
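// --- Illustrative sketch (not part of this file) ---
// Why the IsInt<32>() check above matters: the three-operand imulq form only
// encodes a sign-extended 32-bit immediate, so a 64-bit constant outside that
// range has to come from the RIP-relative constant area via LiteralInt64Address().
// A minimal stand-alone version of the range test (names here are illustrative):
#include <cstdint>
#include <limits>

inline bool FitsInImulImmediate(int64_t constant) {
  return constant >= std::numeric_limits<int32_t>::min() &&
         constant <= std::numeric_limits<int32_t>::max();  // Same condition as IsInt<32>(constant).
}
// Example: FitsInImulImmediate(0x7fffffff) is true; FitsInImulImmediate(INT64_C(0x100000000)) is false.
// --- end sketch ---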
3198
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003199void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3200 uint32_t stack_adjustment, bool is_float) {
3201 if (source.IsStackSlot()) {
3202 DCHECK(is_float);
3203 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3204 } else if (source.IsDoubleStackSlot()) {
3205 DCHECK(!is_float);
3206 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3207 } else {
3208 // Write the value to the temporary location on the stack and load to FP stack.
3209 if (is_float) {
3210 Location stack_temp = Location::StackSlot(temp_offset);
3211 codegen_->Move(stack_temp, source);
3212 __ flds(Address(CpuRegister(RSP), temp_offset));
3213 } else {
3214 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3215 codegen_->Move(stack_temp, source);
3216 __ fldl(Address(CpuRegister(RSP), temp_offset));
3217 }
3218 }
3219}
3220
3221void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3222 Primitive::Type type = rem->GetResultType();
3223 bool is_float = type == Primitive::kPrimFloat;
3224 size_t elem_size = Primitive::ComponentSize(type);
3225 LocationSummary* locations = rem->GetLocations();
3226 Location first = locations->InAt(0);
3227 Location second = locations->InAt(1);
3228 Location out = locations->Out();
3229
3230 // Create stack space for 2 elements.
3231 // TODO: enhance register allocator to ask for stack temporaries.
3232 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3233
3234 // Load the values to the FP stack in reverse order, using temporaries if needed.
3235 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3236 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3237
3238 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003239 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003240 __ Bind(&retry);
3241 __ fprem();
3242
3243 // Move FP status to AX.
3244 __ fstsw();
3245
3246 // And see if the argument reduction is complete. This is signaled by the
3247 // C2 FPU flag bit set to 0.
3248 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3249 __ j(kNotEqual, &retry);
3250
3251 // We have settled on the final value. Retrieve it into an XMM register.
3252 // Store FP top of stack to real stack.
3253 if (is_float) {
3254 __ fsts(Address(CpuRegister(RSP), 0));
3255 } else {
3256 __ fstl(Address(CpuRegister(RSP), 0));
3257 }
3258
3259 // Pop the 2 items from the FP stack.
3260 __ fucompp();
3261
3262 // Load the value from the stack into an XMM register.
3263 DCHECK(out.IsFpuRegister()) << out;
3264 if (is_float) {
3265 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3266 } else {
3267 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3268 }
3269
3270 // And remove the temporary stack space we allocated.
3271 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3272}
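// --- Illustrative sketch (not part of this file) ---
// Behaviorally, the fprem loop above computes the Java '%' on floats and doubles,
// which truncates the implied quotient toward zero (like C's fmod), not the
// IEEE-754 remainder. fprem reduces the exponent difference by at most 63 bits
// per iteration, which is why the C2 status flag is polled in a retry loop.
// A plain C++ stand-in for the final value (assumption: std::fmod matches the
// Java semantics here, including NaN/infinity propagation and sign of the result):
#include <cmath>

inline float JavaFloatRem(float dividend, float divisor) {
  return std::fmod(dividend, divisor);  // Result carries the sign of the dividend.
}

inline double JavaDoubleRem(double dividend, double divisor) {
  return std::fmod(dividend, divisor);
}
// --- end sketch ---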
3273
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003274void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3275 DCHECK(instruction->IsDiv() || instruction->IsRem());
3276
3277 LocationSummary* locations = instruction->GetLocations();
3278 Location second = locations->InAt(1);
3279 DCHECK(second.IsConstant());
3280
3281 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3282 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003283 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003284
3285 DCHECK(imm == 1 || imm == -1);
3286
3287 switch (instruction->GetResultType()) {
3288 case Primitive::kPrimInt: {
3289 if (instruction->IsRem()) {
3290 __ xorl(output_register, output_register);
3291 } else {
3292 __ movl(output_register, input_register);
3293 if (imm == -1) {
3294 __ negl(output_register);
3295 }
3296 }
3297 break;
3298 }
3299
3300 case Primitive::kPrimLong: {
3301 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003302 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003303 } else {
3304 __ movq(output_register, input_register);
3305 if (imm == -1) {
3306 __ negq(output_register);
3307 }
3308 }
3309 break;
3310 }
3311
3312 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003313 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003314 }
3315}
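// --- Illustrative sketch (not part of this file) ---
// The +/-1 fast path above relies on simple identities (with two's-complement
// wrap, so INT_MIN / -1 == INT_MIN): division by 1 is a copy, division by -1 is
// a wrapping negation, and any remainder by +/-1 is zero, hence the xorl for rem
// and the movl(+negl) for div. A reference version of the int case:
#include <cstdint>

inline int32_t DivByOneOrMinusOne(int32_t numerator, int32_t imm) {
  // Caller guarantees imm is +1 or -1 (see the DCHECK above).
  return imm == 1 ? numerator
                  : static_cast<int32_t>(0u - static_cast<uint32_t>(numerator));  // Wrapping negate.
}

inline int32_t RemByOneOrMinusOne(int32_t /* numerator */) {
  return 0;  // x % 1 == x % -1 == 0 for every x.
}
// --- end sketch ---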
3316
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003317void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003318 LocationSummary* locations = instruction->GetLocations();
3319 Location second = locations->InAt(1);
3320
3321 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3322 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3323
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003324 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003325 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3326 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003327
3328 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3329
3330 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003331 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003332 __ testl(numerator, numerator);
3333 __ cmov(kGreaterEqual, tmp, numerator);
3334 int shift = CTZ(imm);
3335 __ sarl(tmp, Immediate(shift));
3336
3337 if (imm < 0) {
3338 __ negl(tmp);
3339 }
3340
3341 __ movl(output_register, tmp);
3342 } else {
3343 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3344 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3345
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003346 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003347 __ addq(rdx, numerator);
3348 __ testq(numerator, numerator);
3349 __ cmov(kGreaterEqual, rdx, numerator);
3350 int shift = CTZ(imm);
3351 __ sarq(rdx, Immediate(shift));
3352
3353 if (imm < 0) {
3354 __ negq(rdx);
3355 }
3356
3357 __ movq(output_register, rdx);
3358 }
3359}
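// --- Illustrative sketch (not part of this file) ---
// What the leal/testl/cmov/sarl(/negl) sequence above computes: Java division
// truncates toward zero, while an arithmetic shift alone rounds toward negative
// infinity, so negative numerators are first biased by (abs_imm - 1). A reference
// version of the int path, using 64-bit intermediates to keep the bias obvious:
#include <cstdint>
#include <cstdlib>

inline int32_t DivByPowerOfTwoSketch(int32_t numerator, int32_t imm) {
  int64_t abs_imm = std::abs(static_cast<int64_t>(imm));        // Known power of two, >= 2.
  int shift = __builtin_ctzll(static_cast<uint64_t>(abs_imm));  // Stand-in for CTZ() above.
  int64_t biased = numerator >= 0
      ? static_cast<int64_t>(numerator)
      : static_cast<int64_t>(numerator) + (abs_imm - 1);        // leal(tmp, Address(numerator, abs_imm - 1))
  int32_t quotient = static_cast<int32_t>(biased >> shift);     // sarl(tmp, shift)
  return imm < 0 ? -quotient : quotient;                        // negl(tmp) for negative divisors.
}
// Example: DivByPowerOfTwoSketch(-7, 4) == -1 (a plain -7 >> 2 would give -2).
// --- end sketch ---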
3360
3361void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3362 DCHECK(instruction->IsDiv() || instruction->IsRem());
3363
3364 LocationSummary* locations = instruction->GetLocations();
3365 Location second = locations->InAt(1);
3366
3367 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3368 : locations->GetTemp(0).AsRegister<CpuRegister>();
3369 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3370 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3371 : locations->Out().AsRegister<CpuRegister>();
3372 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3373
3374 DCHECK_EQ(RAX, eax.AsRegister());
3375 DCHECK_EQ(RDX, edx.AsRegister());
3376 if (instruction->IsDiv()) {
3377 DCHECK_EQ(RAX, out.AsRegister());
3378 } else {
3379 DCHECK_EQ(RDX, out.AsRegister());
3380 }
3381
3382 int64_t magic;
3383 int shift;
3384
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003385 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003386 if (instruction->GetResultType() == Primitive::kPrimInt) {
3387 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3388
3389 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3390
3391 __ movl(numerator, eax);
3392
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003393 __ movl(eax, Immediate(magic));
3394 __ imull(numerator);
3395
3396 if (imm > 0 && magic < 0) {
3397 __ addl(edx, numerator);
3398 } else if (imm < 0 && magic > 0) {
3399 __ subl(edx, numerator);
3400 }
3401
3402 if (shift != 0) {
3403 __ sarl(edx, Immediate(shift));
3404 }
3405
3406 __ movl(eax, edx);
3407 __ shrl(edx, Immediate(31));
3408 __ addl(edx, eax);
3409
3410 if (instruction->IsRem()) {
3411 __ movl(eax, numerator);
3412 __ imull(edx, Immediate(imm));
3413 __ subl(eax, edx);
3414 __ movl(edx, eax);
3415 } else {
3416 __ movl(eax, edx);
3417 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003418 } else {
3419 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3420
3421 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3422
3423 CpuRegister rax = eax;
3424 CpuRegister rdx = edx;
3425
3426 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3427
3428 // Save the numerator.
3429 __ movq(numerator, rax);
3430
3431 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003432 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003433
3434 // RDX:RAX = magic * numerator
3435 __ imulq(numerator);
3436
3437 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003438 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003439 __ addq(rdx, numerator);
3440 } else if (imm < 0 && magic > 0) {
3441 // RDX -= numerator
3442 __ subq(rdx, numerator);
3443 }
3444
3445 // Shift if needed.
3446 if (shift != 0) {
3447 __ sarq(rdx, Immediate(shift));
3448 }
3449
3450 // RDX += 1 if RDX < 0
3451 __ movq(rax, rdx);
3452 __ shrq(rdx, Immediate(63));
3453 __ addq(rdx, rax);
3454
3455 if (instruction->IsRem()) {
3456 __ movq(rax, numerator);
3457
3458 if (IsInt<32>(imm)) {
3459 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3460 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003461 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003462 }
3463
3464 __ subq(rax, rdx);
3465 __ movq(rdx, rax);
3466 } else {
3467 __ movq(rax, rdx);
3468 }
3469 }
3470}
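// --- Illustrative sketch (not part of this file) ---
// The imull/sarl/shrl sequence above is the classic "magic number" signed division
// (Hacker's Delight, chapter 10): multiply by a precomputed constant, keep the high
// half of the product, correct for the signs of divisor and magic, shift, then add
// the sign bit so the quotient rounds toward zero. The magic/shift pair comes from
// CalculateMagicAndShiftForDivRem(); the (0x92492493, 2) pair used for divisor 7
// below is only a worked example, not something this file hard-codes.
#include <cstdint>

inline int32_t DivByConstantViaMagic(int32_t numerator, int32_t divisor,
                                     int32_t magic, int shift) {
  int64_t product = static_cast<int64_t>(magic) * numerator;
  int32_t hi = static_cast<int32_t>(product >> 32);     // EDX after imull.
  if (divisor > 0 && magic < 0) hi += numerator;        // addl(edx, numerator)
  if (divisor < 0 && magic > 0) hi -= numerator;        // subl(edx, numerator)
  hi >>= shift;                                         // sarl(edx, shift)
  return hi + static_cast<int32_t>(static_cast<uint32_t>(hi) >> 31);  // +1 if negative.
}
// Example: DivByConstantViaMagic(100, 7, static_cast<int32_t>(0x92492493), 2) == 14.
// --- end sketch ---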
3471
Calin Juravlebacfec32014-11-14 15:54:36 +00003472void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3473 DCHECK(instruction->IsDiv() || instruction->IsRem());
3474 Primitive::Type type = instruction->GetResultType();
3475  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3476
3477 bool is_div = instruction->IsDiv();
3478 LocationSummary* locations = instruction->GetLocations();
3479
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003480 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3481 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003482
Roland Levillain271ab9c2014-11-27 15:23:57 +00003483 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003484 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003485
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003486 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003487 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003488
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003489 if (imm == 0) {
3490      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3491 } else if (imm == 1 || imm == -1) {
3492 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003493 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003494 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003495 } else {
3496 DCHECK(imm <= -2 || imm >= 2);
3497 GenerateDivRemWithAnyConstant(instruction);
3498 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003499 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003500 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003501 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003502 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003503 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003504
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003505 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3506 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3507 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3508      // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000)
3509 if (type == Primitive::kPrimInt) {
3510 __ cmpl(second_reg, Immediate(-1));
3511 __ j(kEqual, slow_path->GetEntryLabel());
3512        // edx:eax <- sign extension of eax
3513 __ cdq();
3514 // eax = quotient, edx = remainder
3515 __ idivl(second_reg);
3516 } else {
3517 __ cmpq(second_reg, Immediate(-1));
3518 __ j(kEqual, slow_path->GetEntryLabel());
3519        // rdx:rax <- sign extension of rax
3520 __ cqo();
3521 // rax = quotient, rdx = remainder
3522 __ idivq(second_reg);
3523 }
3524 __ Bind(slow_path->GetExitLabel());
3525 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003526}
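// --- Illustrative sketch (not part of this file) ---
// Why the cmpl/cmpq against -1 above jumps to a slow path: idivl/idivq raise a
// #DE fault when the quotient overflows (INT_MIN / -1, INT64_MIN / -1), but the
// Java language defines those results as INT_MIN / INT64_MIN with remainder 0.
// Reference semantics for the int case (divisor == 0 was already excluded by
// HDivZeroCheck):
#include <cstdint>

inline int32_t JavaDiv32(int32_t numerator, int32_t divisor) {
  if (divisor == -1) {
    return static_cast<int32_t>(0u - static_cast<uint32_t>(numerator));  // Wraps at INT32_MIN.
  }
  return numerator / divisor;  // No overflow possible once divisor is neither 0 nor -1.
}

inline int32_t JavaRem32(int32_t numerator, int32_t divisor) {
  return divisor == -1 ? 0 : numerator % divisor;
}
// --- end sketch ---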
3527
Calin Juravle7c4954d2014-10-28 16:57:40 +00003528void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3529 LocationSummary* locations =
3530 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3531 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003532 case Primitive::kPrimInt:
3533 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003534 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003535 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003536 locations->SetOut(Location::SameAsFirstInput());
3537      // Intel uses edx:eax (rdx:rax for longs) as the dividend.
3538 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003539 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3540 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3541 // output and request another temp.
3542 if (div->InputAt(1)->IsConstant()) {
3543 locations->AddTemp(Location::RequiresRegister());
3544 }
Calin Juravled0d48522014-11-04 16:40:20 +00003545 break;
3546 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003547
Calin Juravle7c4954d2014-10-28 16:57:40 +00003548 case Primitive::kPrimFloat:
3549 case Primitive::kPrimDouble: {
3550 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003551 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003552 locations->SetOut(Location::SameAsFirstInput());
3553 break;
3554 }
3555
3556 default:
3557 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3558 }
3559}
3560
3561void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3562 LocationSummary* locations = div->GetLocations();
3563 Location first = locations->InAt(0);
3564 Location second = locations->InAt(1);
3565 DCHECK(first.Equals(locations->Out()));
3566
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003567 Primitive::Type type = div->GetResultType();
3568 switch (type) {
3569 case Primitive::kPrimInt:
3570 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003571 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003572 break;
3573 }
3574
Calin Juravle7c4954d2014-10-28 16:57:40 +00003575 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003576 if (second.IsFpuRegister()) {
3577 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3578 } else if (second.IsConstant()) {
3579 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003580 codegen_->LiteralFloatAddress(
3581 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003582 } else {
3583 DCHECK(second.IsStackSlot());
3584 __ divss(first.AsFpuRegister<XmmRegister>(),
3585 Address(CpuRegister(RSP), second.GetStackIndex()));
3586 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003587 break;
3588 }
3589
3590 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003591 if (second.IsFpuRegister()) {
3592 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3593 } else if (second.IsConstant()) {
3594 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003595 codegen_->LiteralDoubleAddress(
3596 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003597 } else {
3598 DCHECK(second.IsDoubleStackSlot());
3599 __ divsd(first.AsFpuRegister<XmmRegister>(),
3600 Address(CpuRegister(RSP), second.GetStackIndex()));
3601 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003602 break;
3603 }
3604
3605 default:
3606 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3607 }
3608}
3609
Calin Juravlebacfec32014-11-14 15:54:36 +00003610void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003611 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003612 LocationSummary* locations =
3613 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003614
3615 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003616 case Primitive::kPrimInt:
3617 case Primitive::kPrimLong: {
3618 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003619 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003620 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3621 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003622 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3623 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3624 // output and request another temp.
3625 if (rem->InputAt(1)->IsConstant()) {
3626 locations->AddTemp(Location::RequiresRegister());
3627 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003628 break;
3629 }
3630
3631 case Primitive::kPrimFloat:
3632 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003633 locations->SetInAt(0, Location::Any());
3634 locations->SetInAt(1, Location::Any());
3635 locations->SetOut(Location::RequiresFpuRegister());
3636 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003637 break;
3638 }
3639
3640 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003641 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003642 }
3643}
3644
3645void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3646 Primitive::Type type = rem->GetResultType();
3647 switch (type) {
3648 case Primitive::kPrimInt:
3649 case Primitive::kPrimLong: {
3650 GenerateDivRemIntegral(rem);
3651 break;
3652 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003653 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003654 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003655 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003656 break;
3657 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003658 default:
3659 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3660 }
3661}
3662
Calin Juravled0d48522014-11-04 16:40:20 +00003663void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003664 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3665 ? LocationSummary::kCallOnSlowPath
3666 : LocationSummary::kNoCall;
3667 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003668 locations->SetInAt(0, Location::Any());
3669 if (instruction->HasUses()) {
3670 locations->SetOut(Location::SameAsFirstInput());
3671 }
3672}
3673
3674void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003675 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003676 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3677 codegen_->AddSlowPath(slow_path);
3678
3679 LocationSummary* locations = instruction->GetLocations();
3680 Location value = locations->InAt(0);
3681
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003682 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003683 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003684 case Primitive::kPrimByte:
3685 case Primitive::kPrimChar:
3686 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003687 case Primitive::kPrimInt: {
3688 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003689 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003690 __ j(kEqual, slow_path->GetEntryLabel());
3691 } else if (value.IsStackSlot()) {
3692 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3693 __ j(kEqual, slow_path->GetEntryLabel());
3694 } else {
3695 DCHECK(value.IsConstant()) << value;
3696 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3697 __ jmp(slow_path->GetEntryLabel());
3698 }
3699 }
3700 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003701 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003702 case Primitive::kPrimLong: {
3703 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003704 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003705 __ j(kEqual, slow_path->GetEntryLabel());
3706 } else if (value.IsDoubleStackSlot()) {
3707 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3708 __ j(kEqual, slow_path->GetEntryLabel());
3709 } else {
3710 DCHECK(value.IsConstant()) << value;
3711 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3712 __ jmp(slow_path->GetEntryLabel());
3713 }
3714 }
3715 break;
3716 }
3717 default:
3718 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003719 }
Calin Juravled0d48522014-11-04 16:40:20 +00003720}
3721
Calin Juravle9aec02f2014-11-18 23:06:35 +00003722void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3723 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3724
3725 LocationSummary* locations =
3726 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3727
3728 switch (op->GetResultType()) {
3729 case Primitive::kPrimInt:
3730 case Primitive::kPrimLong: {
3731 locations->SetInAt(0, Location::RequiresRegister());
3732 // The shift count needs to be in CL.
3733 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3734 locations->SetOut(Location::SameAsFirstInput());
3735 break;
3736 }
3737 default:
3738 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3739 }
3740}
3741
3742void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3743 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3744
3745 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003746 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003747 Location second = locations->InAt(1);
3748
3749 switch (op->GetResultType()) {
3750 case Primitive::kPrimInt: {
3751 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003752 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003753 if (op->IsShl()) {
3754 __ shll(first_reg, second_reg);
3755 } else if (op->IsShr()) {
3756 __ sarl(first_reg, second_reg);
3757 } else {
3758 __ shrl(first_reg, second_reg);
3759 }
3760 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003761 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003762 if (op->IsShl()) {
3763 __ shll(first_reg, imm);
3764 } else if (op->IsShr()) {
3765 __ sarl(first_reg, imm);
3766 } else {
3767 __ shrl(first_reg, imm);
3768 }
3769 }
3770 break;
3771 }
3772 case Primitive::kPrimLong: {
3773 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003774 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003775 if (op->IsShl()) {
3776 __ shlq(first_reg, second_reg);
3777 } else if (op->IsShr()) {
3778 __ sarq(first_reg, second_reg);
3779 } else {
3780 __ shrq(first_reg, second_reg);
3781 }
3782 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003783 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003784 if (op->IsShl()) {
3785 __ shlq(first_reg, imm);
3786 } else if (op->IsShr()) {
3787 __ sarq(first_reg, imm);
3788 } else {
3789 __ shrq(first_reg, imm);
3790 }
3791 }
3792 break;
3793 }
3794 default:
3795 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003796 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003797 }
3798}
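// --- Illustrative sketch (not part of this file) ---
// Java uses only the low 5 (int) or 6 (long) bits of a shift distance, which is
// exactly what shll/sarl/shrl and their quadword forms do with a count in CL, so
// the register path above needs no masking and only immediates are masked with
// kMaxIntShiftDistance / kMaxLongShiftDistance. Reference behavior for ints:
#include <cstdint>

inline int32_t JavaShl32(int32_t value, int32_t distance) {
  return static_cast<int32_t>(static_cast<uint32_t>(value) << (distance & 31));
}

inline int32_t JavaShr32(int32_t value, int32_t distance) {   // Arithmetic '>>'.
  return value >> (distance & 31);
}

inline int32_t JavaUShr32(int32_t value, int32_t distance) {  // Logical '>>>'.
  return static_cast<int32_t>(static_cast<uint32_t>(value) >> (distance & 31));
}
// --- end sketch ---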
3799
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003800void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3801 LocationSummary* locations =
3802 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3803
3804 switch (ror->GetResultType()) {
3805 case Primitive::kPrimInt:
3806 case Primitive::kPrimLong: {
3807 locations->SetInAt(0, Location::RequiresRegister());
3808 // The shift count needs to be in CL (unless it is a constant).
3809 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3810 locations->SetOut(Location::SameAsFirstInput());
3811 break;
3812 }
3813 default:
3814 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3815 UNREACHABLE();
3816 }
3817}
3818
3819void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3820 LocationSummary* locations = ror->GetLocations();
3821 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3822 Location second = locations->InAt(1);
3823
3824 switch (ror->GetResultType()) {
3825 case Primitive::kPrimInt:
3826 if (second.IsRegister()) {
3827 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3828 __ rorl(first_reg, second_reg);
3829 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003830 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003831 __ rorl(first_reg, imm);
3832 }
3833 break;
3834 case Primitive::kPrimLong:
3835 if (second.IsRegister()) {
3836 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3837 __ rorq(first_reg, second_reg);
3838 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003839 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003840 __ rorq(first_reg, imm);
3841 }
3842 break;
3843 default:
3844 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3845 UNREACHABLE();
3846 }
3847}
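// --- Illustrative sketch (not part of this file) ---
// rorl/rorq rotate right by the (hardware-masked) distance. A portable equivalent
// of the 32-bit case; the '(32 - d) & 31' keeps the left shift defined when the
// distance is a multiple of 32:
#include <cstdint>

inline int32_t JavaRor32(int32_t value, int32_t distance) {
  uint32_t v = static_cast<uint32_t>(value);
  uint32_t d = static_cast<uint32_t>(distance) & 31u;
  return static_cast<int32_t>((v >> d) | (v << ((32u - d) & 31u)));
}
// Example: JavaRor32(1, 1) == static_cast<int32_t>(0x80000000).
// --- end sketch ---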
3848
Calin Juravle9aec02f2014-11-18 23:06:35 +00003849void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3850 HandleShift(shl);
3851}
3852
3853void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3854 HandleShift(shl);
3855}
3856
3857void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3858 HandleShift(shr);
3859}
3860
3861void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3862 HandleShift(shr);
3863}
3864
3865void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3866 HandleShift(ushr);
3867}
3868
3869void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3870 HandleShift(ushr);
3871}
3872
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003873void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003874 LocationSummary* locations =
3875 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003876 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003877 if (instruction->IsStringAlloc()) {
3878 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3879 } else {
3880 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3881 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3882 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003883 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003884}
3885
3886void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003887  // Note: if heap poisoning is enabled, the entry point takes care
3888 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003889 if (instruction->IsStringAlloc()) {
3890 // String is allocated through StringFactory. Call NewEmptyString entry point.
3891 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3892 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3893 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3894 __ call(Address(temp, code_offset.SizeValue()));
3895 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3896 } else {
3897 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3898 instruction,
3899 instruction->GetDexPc(),
3900 nullptr);
3901 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3902 DCHECK(!codegen_->IsLeafMethod());
3903 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003904}
3905
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003906void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3907 LocationSummary* locations =
3908 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3909 InvokeRuntimeCallingConvention calling_convention;
3910 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003911 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003912 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003913 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003914}
3915
3916void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3917 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003918 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3919 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003920  // Note: if heap poisoning is enabled, the entry point takes care
3921 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003922 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3923 instruction,
3924 instruction->GetDexPc(),
3925 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003926 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003927
3928 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003929}
3930
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003931void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003932 LocationSummary* locations =
3933 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003934 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3935 if (location.IsStackSlot()) {
3936 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3937 } else if (location.IsDoubleStackSlot()) {
3938 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3939 }
3940 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003941}
3942
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003943void InstructionCodeGeneratorX86_64::VisitParameterValue(
3944 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003945 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003946}
3947
3948void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3949 LocationSummary* locations =
3950 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3951 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3952}
3953
3954void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
3955 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3956 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003957}
3958
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003959void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3960 LocationSummary* locations =
3961 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3962 locations->SetInAt(0, Location::RequiresRegister());
3963 locations->SetOut(Location::RequiresRegister());
3964}
3965
3966void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3967 LocationSummary* locations = instruction->GetLocations();
3968 uint32_t method_offset = 0;
Vladimir Markoa1de9182016-02-25 11:37:38 +00003969 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003970 method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3971 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
3972 } else {
3973 method_offset = mirror::Class::EmbeddedImTableEntryOffset(
3974 instruction->GetIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
3975 }
3976 __ movq(locations->Out().AsRegister<CpuRegister>(),
3977 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
3978}
3979
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003980void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003981 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003982 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003983 locations->SetInAt(0, Location::RequiresRegister());
3984 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003985}
3986
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003987void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
3988 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003989 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3990 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003991 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00003992 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003993 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003994 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003995 break;
3996
3997 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003998 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003999 break;
4000
4001 default:
4002 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4003 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004004}
4005
David Brazdil66d126e2015-04-03 16:02:44 +01004006void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4007 LocationSummary* locations =
4008 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4009 locations->SetInAt(0, Location::RequiresRegister());
4010 locations->SetOut(Location::SameAsFirstInput());
4011}
4012
4013void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004014 LocationSummary* locations = bool_not->GetLocations();
4015 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4016 locations->Out().AsRegister<CpuRegister>().AsRegister());
4017 Location out = locations->Out();
4018 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4019}
4020
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004021void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004022 LocationSummary* locations =
4023 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004024 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
4025 locations->SetInAt(i, Location::Any());
4026 }
4027 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004028}
4029
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004030void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004031 LOG(FATAL) << "Unimplemented";
4032}
4033
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004034void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004035 /*
4036   * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004037 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004038 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4039 */
4040 switch (kind) {
4041 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004042 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004043 break;
4044 }
4045 case MemBarrierKind::kAnyStore:
4046 case MemBarrierKind::kLoadAny:
4047 case MemBarrierKind::kStoreStore: {
4048 // nop
4049 break;
4050 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004051 case MemBarrierKind::kNTStoreStore:
4052 // Non-Temporal Store/Store needs an explicit fence.
4053 MemoryFence(/* non-temporal */ true);
4054 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004055 }
4056}
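// --- Illustrative sketch (not part of this file) ---
// The same mapping expressed with standard C++ fences, as an analogy only: on
// x86-64 just the StoreLoad-containing barrier (kAnyAny) costs an instruction;
// the acquire/release-style barriers merely constrain the compiler.
#include <atomic>

inline void AnyAnyBarrier() {
  std::atomic_thread_fence(std::memory_order_seq_cst);  // mfence (or a locked RMW) on x86-64.
}

inline void LoadAnyBarrier() {
  std::atomic_thread_fence(std::memory_order_acquire);  // No instruction emitted on x86-64.
}

inline void AnyStoreBarrier() {
  std::atomic_thread_fence(std::memory_order_release);  // No instruction emitted on x86-64.
}
// --- end sketch ---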
4057
4058void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4059 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4060
Roland Levillain0d5a2812015-11-13 10:07:31 +00004061 bool object_field_get_with_read_barrier =
4062 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004063 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004064 new (GetGraph()->GetArena()) LocationSummary(instruction,
4065 object_field_get_with_read_barrier ?
4066 LocationSummary::kCallOnSlowPath :
4067 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004068 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004069 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4070 locations->SetOut(Location::RequiresFpuRegister());
4071 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004072 // The output overlaps for an object field get when read barriers
4073 // are enabled: we do not want the move to overwrite the object's
4074 // location, as we need it to emit the read barrier.
4075 locations->SetOut(
4076 Location::RequiresRegister(),
4077 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004078 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004079 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4080 // We need a temporary register for the read barrier marking slow
4081 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4082 locations->AddTemp(Location::RequiresRegister());
4083 }
Calin Juravle52c48962014-12-16 17:02:57 +00004084}
4085
4086void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4087 const FieldInfo& field_info) {
4088 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4089
4090 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004091 Location base_loc = locations->InAt(0);
4092 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004093 Location out = locations->Out();
4094 bool is_volatile = field_info.IsVolatile();
4095 Primitive::Type field_type = field_info.GetFieldType();
4096 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4097
4098 switch (field_type) {
4099 case Primitive::kPrimBoolean: {
4100 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4101 break;
4102 }
4103
4104 case Primitive::kPrimByte: {
4105 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4106 break;
4107 }
4108
4109 case Primitive::kPrimShort: {
4110 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4111 break;
4112 }
4113
4114 case Primitive::kPrimChar: {
4115 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4116 break;
4117 }
4118
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004119 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004120 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4121 break;
4122 }
4123
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004124 case Primitive::kPrimNot: {
4125 // /* HeapReference<Object> */ out = *(base + offset)
4126 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4127 Location temp_loc = locations->GetTemp(0);
4128 // Note that a potential implicit null check is handled in this
4129      // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4130 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4131 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4132 if (is_volatile) {
4133 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4134 }
4135 } else {
4136 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4137 codegen_->MaybeRecordImplicitNullCheck(instruction);
4138 if (is_volatile) {
4139 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4140 }
4141 // If read barriers are enabled, emit read barriers other than
4142 // Baker's using a slow path (and also unpoison the loaded
4143 // reference, if heap poisoning is enabled).
4144 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4145 }
4146 break;
4147 }
4148
Calin Juravle52c48962014-12-16 17:02:57 +00004149 case Primitive::kPrimLong: {
4150 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4151 break;
4152 }
4153
4154 case Primitive::kPrimFloat: {
4155 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4156 break;
4157 }
4158
4159 case Primitive::kPrimDouble: {
4160 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4161 break;
4162 }
4163
4164 case Primitive::kPrimVoid:
4165 LOG(FATAL) << "Unreachable type " << field_type;
4166 UNREACHABLE();
4167 }
4168
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004169 if (field_type == Primitive::kPrimNot) {
4170 // Potential implicit null checks, in the case of reference
4171 // fields, are handled in the previous switch statement.
4172 } else {
4173 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004174 }
Roland Levillain4d027112015-07-01 15:41:14 +01004175
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004176 if (is_volatile) {
4177 if (field_type == Primitive::kPrimNot) {
4178 // Memory barriers, in the case of references, are also handled
4179 // in the previous switch statement.
4180 } else {
4181 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4182 }
Roland Levillain4d027112015-07-01 15:41:14 +01004183 }
Calin Juravle52c48962014-12-16 17:02:57 +00004184}
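// --- Illustrative sketch (not part of this file) ---
// For a volatile field load the code above is just the ordinary mov followed by a
// kLoadAny barrier, which emits nothing on x86-64 because loads already have
// acquire semantics there. A standard C++ analogue (the Holder type is
// hypothetical, not something declared in this file):
#include <atomic>
#include <cstdint>

struct Holder { std::atomic<int32_t> field{0}; };

inline int32_t VolatileLoadAnalogue(const Holder& holder) {
  return holder.field.load(std::memory_order_seq_cst);  // Typically a plain movl on x86-64.
}
// --- end sketch ---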
4185
4186void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4187 const FieldInfo& field_info) {
4188 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4189
4190 LocationSummary* locations =
4191 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004192 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004193 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004194 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004195 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004196
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004197 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004198 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004199 if (is_volatile) {
4200 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4201 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4202 } else {
4203 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4204 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004205 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004206 if (is_volatile) {
4207 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4208 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4209 } else {
4210 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4211 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004212 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004213 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004214 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004215 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004216 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004217 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4218 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004219 locations->AddTemp(Location::RequiresRegister());
4220 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004221}
4222
Calin Juravle52c48962014-12-16 17:02:57 +00004223void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004224 const FieldInfo& field_info,
4225 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004226 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4227
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004228 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004229 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4230 Location value = locations->InAt(1);
4231 bool is_volatile = field_info.IsVolatile();
4232 Primitive::Type field_type = field_info.GetFieldType();
4233 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4234
4235 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004236 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004237 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004238
Mark Mendellea5af682015-10-22 17:35:49 -04004239 bool maybe_record_implicit_null_check_done = false;
4240
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004241 switch (field_type) {
4242 case Primitive::kPrimBoolean:
4243 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004244 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004245 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004246 __ movb(Address(base, offset), Immediate(v));
4247 } else {
4248 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4249 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004250 break;
4251 }
4252
4253 case Primitive::kPrimShort:
4254 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004255 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004256 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004257 __ movw(Address(base, offset), Immediate(v));
4258 } else {
4259 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4260 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004261 break;
4262 }
4263
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004264 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004265 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004266 if (value.IsConstant()) {
4267 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004268 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4269 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4270 // Note: if heap poisoning is enabled, no need to poison
4271 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004272 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004273 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004274 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4275 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4276 __ movl(temp, value.AsRegister<CpuRegister>());
4277 __ PoisonHeapReference(temp);
4278 __ movl(Address(base, offset), temp);
4279 } else {
4280 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4281 }
Mark Mendell40741f32015-04-20 22:10:34 -04004282 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004283 break;
4284 }
4285
4286 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004287 if (value.IsConstant()) {
4288 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004289 codegen_->MoveInt64ToAddress(Address(base, offset),
4290 Address(base, offset + sizeof(int32_t)),
4291 v,
4292 instruction);
4293 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004294 } else {
4295 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4296 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004297 break;
4298 }
4299
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004300 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004301 if (value.IsConstant()) {
4302 int32_t v =
4303 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4304 __ movl(Address(base, offset), Immediate(v));
4305 } else {
4306 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4307 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004308 break;
4309 }
4310
4311 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004312 if (value.IsConstant()) {
4313 int64_t v =
4314 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4315 codegen_->MoveInt64ToAddress(Address(base, offset),
4316 Address(base, offset + sizeof(int32_t)),
4317 v,
4318 instruction);
4319 maybe_record_implicit_null_check_done = true;
4320 } else {
4321 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4322 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004323 break;
4324 }
4325
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004326 case Primitive::kPrimVoid:
4327 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004328 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004329 }
Calin Juravle52c48962014-12-16 17:02:57 +00004330
Mark Mendellea5af682015-10-22 17:35:49 -04004331 if (!maybe_record_implicit_null_check_done) {
4332 codegen_->MaybeRecordImplicitNullCheck(instruction);
4333 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004334
4335 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4336 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4337 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004338 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004339 }
4340
Calin Juravle52c48962014-12-16 17:02:57 +00004341 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004342 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004343 }
4344}
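// A rough sketch of what HandleFieldSet emits for a volatile reference field,
// assuming heap poisoning is disabled (register names are illustrative, not
// the actual allocation):
//
//   __ movl(Address(base, offset), value_reg);       // the field store
//   MaybeRecordImplicitNullCheck(instruction);       // a fault on the store maps to an NPE
//   MarkGCCard(temp, card, base, value_reg, value_can_be_null);
//   GenerateMemoryBarrier(MemBarrierKind::kAnyAny);  // store-load barrier for volatile
//
// For long/double constants, MoveInt64ToAddress records the implicit null
// check itself, which is why maybe_record_implicit_null_check_done suppresses
// the extra MaybeRecordImplicitNullCheck call.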
4345
4346void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4347 HandleFieldSet(instruction, instruction->GetFieldInfo());
4348}
4349
4350void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004351 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004352}
4353
4354void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004355 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004356}
4357
4358void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004359 HandleFieldGet(instruction, instruction->GetFieldInfo());
4360}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004361
Calin Juravle52c48962014-12-16 17:02:57 +00004362void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4363 HandleFieldGet(instruction);
4364}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004365
Calin Juravle52c48962014-12-16 17:02:57 +00004366void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4367 HandleFieldGet(instruction, instruction->GetFieldInfo());
4368}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004369
Calin Juravle52c48962014-12-16 17:02:57 +00004370void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4371 HandleFieldSet(instruction, instruction->GetFieldInfo());
4372}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004373
Calin Juravle52c48962014-12-16 17:02:57 +00004374void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004375 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004376}
4377
Calin Juravlee460d1d2015-09-29 04:52:17 +01004378void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4379 HUnresolvedInstanceFieldGet* instruction) {
4380 FieldAccessCallingConventionX86_64 calling_convention;
4381 codegen_->CreateUnresolvedFieldLocationSummary(
4382 instruction, instruction->GetFieldType(), calling_convention);
4383}
4384
4385void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4386 HUnresolvedInstanceFieldGet* instruction) {
4387 FieldAccessCallingConventionX86_64 calling_convention;
4388 codegen_->GenerateUnresolvedFieldAccess(instruction,
4389 instruction->GetFieldType(),
4390 instruction->GetFieldIndex(),
4391 instruction->GetDexPc(),
4392 calling_convention);
4393}
4394
4395void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4396 HUnresolvedInstanceFieldSet* instruction) {
4397 FieldAccessCallingConventionX86_64 calling_convention;
4398 codegen_->CreateUnresolvedFieldLocationSummary(
4399 instruction, instruction->GetFieldType(), calling_convention);
4400}
4401
4402void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4403 HUnresolvedInstanceFieldSet* instruction) {
4404 FieldAccessCallingConventionX86_64 calling_convention;
4405 codegen_->GenerateUnresolvedFieldAccess(instruction,
4406 instruction->GetFieldType(),
4407 instruction->GetFieldIndex(),
4408 instruction->GetDexPc(),
4409 calling_convention);
4410}
4411
4412void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4413 HUnresolvedStaticFieldGet* instruction) {
4414 FieldAccessCallingConventionX86_64 calling_convention;
4415 codegen_->CreateUnresolvedFieldLocationSummary(
4416 instruction, instruction->GetFieldType(), calling_convention);
4417}
4418
4419void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4420 HUnresolvedStaticFieldGet* instruction) {
4421 FieldAccessCallingConventionX86_64 calling_convention;
4422 codegen_->GenerateUnresolvedFieldAccess(instruction,
4423 instruction->GetFieldType(),
4424 instruction->GetFieldIndex(),
4425 instruction->GetDexPc(),
4426 calling_convention);
4427}
4428
4429void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4430 HUnresolvedStaticFieldSet* instruction) {
4431 FieldAccessCallingConventionX86_64 calling_convention;
4432 codegen_->CreateUnresolvedFieldLocationSummary(
4433 instruction, instruction->GetFieldType(), calling_convention);
4434}
4435
4436void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4437 HUnresolvedStaticFieldSet* instruction) {
4438 FieldAccessCallingConventionX86_64 calling_convention;
4439 codegen_->GenerateUnresolvedFieldAccess(instruction,
4440 instruction->GetFieldType(),
4441 instruction->GetFieldIndex(),
4442 instruction->GetDexPc(),
4443 calling_convention);
4444}
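// All four Unresolved*Field visitors above share one pattern: the locations
// builder sets up a runtime-call summary from FieldAccessCallingConventionX86_64,
// and code generation delegates to GenerateUnresolvedFieldAccess, which
// (roughly) picks a quick entrypoint based on the field type, the access kind
// (get/set, instance/static) and the field index, so the access happens
// through a runtime call instead of a direct memory operation.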
4445
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004446void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004447 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4448 ? LocationSummary::kCallOnSlowPath
4449 : LocationSummary::kNoCall;
4450 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4451 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004452 ? Location::RequiresRegister()
4453 : Location::Any();
4454 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004455 if (instruction->HasUses()) {
4456 locations->SetOut(Location::SameAsFirstInput());
4457 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004458}
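// Note on the locations above: an implicit null check needs the object in a
// register so that the faulting testl in GenerateImplicitNullCheck can address
// it, while the explicit check also copes with a stack slot (cmpl against the
// slot) or a null constant (an unconditional jump to the slow path), hence
// Location::Any() in that case.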
4459
Calin Juravle2ae48182016-03-16 14:05:09 +00004460void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4461 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004462 return;
4463 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004464 LocationSummary* locations = instruction->GetLocations();
4465 Location obj = locations->InAt(0);
4466
4467 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004468 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004469}
4470
Calin Juravle2ae48182016-03-16 14:05:09 +00004471void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004472 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004473 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004474
4475 LocationSummary* locations = instruction->GetLocations();
4476 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004477
4478 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004479 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004480 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004481 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004482 } else {
4483 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004484 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004485 __ jmp(slow_path->GetEntryLabel());
4486 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004487 }
4488 __ j(kEqual, slow_path->GetEntryLabel());
4489}
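// A rough sketch of the two null-check flavours (illustrative only):
//
//   Implicit:  __ testl(CpuRegister(RAX), Address(obj_reg, 0));
//              // A fault on this load is turned into a NullPointerException
//              // by the runtime, using the PC info recorded right after it.
//
//   Explicit:  __ testl(obj_reg, obj_reg);
//              __ j(kEqual, slow_path->GetEntryLabel());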
4490
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004491void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004492 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004493}
4494
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004495void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004496 bool object_array_get_with_read_barrier =
4497 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004498 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004499 new (GetGraph()->GetArena()) LocationSummary(instruction,
4500 object_array_get_with_read_barrier ?
4501 LocationSummary::kCallOnSlowPath :
4502 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004503 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004504 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004505 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4506 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4507 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004508 // The output overlaps for an object array get when read barriers
4509 // are enabled: we do not want the move to overwrite the array's
4510 // location, as we need it to emit the read barrier.
4511 locations->SetOut(
4512 Location::RequiresRegister(),
4513 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004514 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004515 // We need a temporary register for the read barrier marking slow
4516 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4517 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4518 locations->AddTemp(Location::RequiresRegister());
4519 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004520}
4521
4522void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4523 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004524 Location obj_loc = locations->InAt(0);
4525 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004526 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004527 Location out_loc = locations->Out();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004528
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004529 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004530 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004531 case Primitive::kPrimBoolean: {
4532 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004533 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004534 if (index.IsConstant()) {
4535 __ movzxb(out, Address(obj,
4536 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4537 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004538 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004539 }
4540 break;
4541 }
4542
4543 case Primitive::kPrimByte: {
4544 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004545 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004546 if (index.IsConstant()) {
4547 __ movsxb(out, Address(obj,
4548 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4549 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004550 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004551 }
4552 break;
4553 }
4554
4555 case Primitive::kPrimShort: {
4556 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004557 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004558 if (index.IsConstant()) {
4559 __ movsxw(out, Address(obj,
4560 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4561 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004562 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004563 }
4564 break;
4565 }
4566
4567 case Primitive::kPrimChar: {
4568 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004569 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004570 if (index.IsConstant()) {
4571 __ movzxw(out, Address(obj,
4572 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4573 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004574 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004575 }
4576 break;
4577 }
4578
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004579 case Primitive::kPrimInt: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004580 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004581 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004582 if (index.IsConstant()) {
4583 __ movl(out, Address(obj,
4584 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4585 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004586 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004587 }
4588 break;
4589 }
4590
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004591 case Primitive::kPrimNot: {
4592 static_assert(
4593 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4594 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4595 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4596 // /* HeapReference<Object> */ out =
4597 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4598 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4599 Location temp = locations->GetTemp(0);
4600 // Note that a potential implicit null check is handled in this
4601         // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4602 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4603 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4604 } else {
4605 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4606 if (index.IsConstant()) {
4607 uint32_t offset =
4608 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4609 __ movl(out, Address(obj, offset));
4610 codegen_->MaybeRecordImplicitNullCheck(instruction);
4611 // If read barriers are enabled, emit read barriers other than
4612 // Baker's using a slow path (and also unpoison the loaded
4613 // reference, if heap poisoning is enabled).
4614 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4615 } else {
4616 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4617 codegen_->MaybeRecordImplicitNullCheck(instruction);
4618 // If read barriers are enabled, emit read barriers other than
4619 // Baker's using a slow path (and also unpoison the loaded
4620 // reference, if heap poisoning is enabled).
4621 codegen_->MaybeGenerateReadBarrierSlow(
4622 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4623 }
4624 }
4625 break;
4626 }
4627
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004628 case Primitive::kPrimLong: {
4629 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004630 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004631 if (index.IsConstant()) {
4632 __ movq(out, Address(obj,
4633 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4634 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004635 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004636 }
4637 break;
4638 }
4639
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004640 case Primitive::kPrimFloat: {
4641 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004642 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004643 if (index.IsConstant()) {
4644 __ movss(out, Address(obj,
4645 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4646 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004647 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004648 }
4649 break;
4650 }
4651
4652 case Primitive::kPrimDouble: {
4653 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004654 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004655 if (index.IsConstant()) {
4656 __ movsd(out, Address(obj,
4657 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4658 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004659 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004660 }
4661 break;
4662 }
4663
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004664 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004665 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004666 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004667 }
Roland Levillain4d027112015-07-01 15:41:14 +01004668
4669 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004670 // Potential implicit null checks, in the case of reference
4671 // arrays, are handled in the previous switch statement.
4672 } else {
4673 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004674 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004675}
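// Every case above computes the element address with the same two patterns
// (shown for a 4-byte element, where TIMES_4 is the scale):
//
//   constant index:  Address(obj, (constant << TIMES_4) + data_offset)
//   register index:  Address(obj, index_reg, TIMES_4, data_offset)
//
// i.e. a constant index is folded into the displacement, while a register
// index uses x86-64 scaled-index addressing.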
4676
4677void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004678 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004679
4680 bool needs_write_barrier =
4681 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004682 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004683 bool object_array_set_with_read_barrier =
4684 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004685
Nicolas Geoffray39468442014-09-02 15:17:15 +01004686 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004687 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004688 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004689 LocationSummary::kCallOnSlowPath :
4690 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004691
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004692 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004693 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4694 if (Primitive::IsFloatingPointType(value_type)) {
4695 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004696 } else {
4697 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4698 }
4699
4700 if (needs_write_barrier) {
4701 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004702
4703 // This first temporary register is possibly used for heap
4704 // reference poisoning and/or read barrier emission too.
4705 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004706 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004707 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004708}
4709
4710void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4711 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004712 Location array_loc = locations->InAt(0);
4713 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004714 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004715 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004716 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004717 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004718 bool needs_write_barrier =
4719 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004720 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4721 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4722 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004723
4724 switch (value_type) {
4725 case Primitive::kPrimBoolean:
4726 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004727 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4728 Address address = index.IsConstant()
4729 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4730 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4731 if (value.IsRegister()) {
4732 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004733 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004734 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004735 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004736 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004737 break;
4738 }
4739
4740 case Primitive::kPrimShort:
4741 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004742 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4743 Address address = index.IsConstant()
4744 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4745 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4746 if (value.IsRegister()) {
4747 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004748 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004749 DCHECK(value.IsConstant()) << value;
4750 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004751 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004752 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004753 break;
4754 }
4755
4756 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004757 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4758 Address address = index.IsConstant()
4759 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4760 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004761
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004762 if (!value.IsRegister()) {
4763 // Just setting null.
4764 DCHECK(instruction->InputAt(2)->IsNullConstant());
4765 DCHECK(value.IsConstant()) << value;
4766 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004767 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004768 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004769 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004770 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004771 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004772
4773 DCHECK(needs_write_barrier);
4774 CpuRegister register_value = value.AsRegister<CpuRegister>();
4775 NearLabel done, not_null, do_put;
4776 SlowPathCode* slow_path = nullptr;
4777 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004778 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004779 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4780 codegen_->AddSlowPath(slow_path);
4781 if (instruction->GetValueCanBeNull()) {
4782 __ testl(register_value, register_value);
4783 __ j(kNotEqual, &not_null);
4784 __ movl(address, Immediate(0));
4785 codegen_->MaybeRecordImplicitNullCheck(instruction);
4786 __ jmp(&done);
4787 __ Bind(&not_null);
4788 }
4789
Roland Levillain0d5a2812015-11-13 10:07:31 +00004790 if (kEmitCompilerReadBarrier) {
4791 // When read barriers are enabled, the type checking
4792 // instrumentation requires two read barriers:
4793 //
4794 // __ movl(temp2, temp);
4795 // // /* HeapReference<Class> */ temp = temp->component_type_
4796 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004797 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004798 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4799 //
4800 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4801 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004802 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004803 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4804 //
4805 // __ cmpl(temp, temp2);
4806 //
4807 // However, the second read barrier may trash `temp`, as it
4808 // is a temporary register, and as such would not be saved
4809 // along with live registers before calling the runtime (nor
4810 // restored afterwards). So in this case, we bail out and
4811 // delegate the work to the array set slow path.
4812 //
4813 // TODO: Extend the register allocator to support a new
4814 // "(locally) live temp" location so as to avoid always
4815 // going into the slow path when read barriers are enabled.
4816 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004817 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004818 // /* HeapReference<Class> */ temp = array->klass_
4819 __ movl(temp, Address(array, class_offset));
4820 codegen_->MaybeRecordImplicitNullCheck(instruction);
4821 __ MaybeUnpoisonHeapReference(temp);
4822
4823 // /* HeapReference<Class> */ temp = temp->component_type_
4824 __ movl(temp, Address(temp, component_offset));
4825 // If heap poisoning is enabled, no need to unpoison `temp`
4826 // nor the object reference in `register_value->klass`, as
4827 // we are comparing two poisoned references.
4828 __ cmpl(temp, Address(register_value, class_offset));
4829
4830 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4831 __ j(kEqual, &do_put);
4832 // If heap poisoning is enabled, the `temp` reference has
4833 // not been unpoisoned yet; unpoison it now.
4834 __ MaybeUnpoisonHeapReference(temp);
4835
4836 // /* HeapReference<Class> */ temp = temp->super_class_
4837 __ movl(temp, Address(temp, super_offset));
4838 // If heap poisoning is enabled, no need to unpoison
4839 // `temp`, as we are comparing against null below.
4840 __ testl(temp, temp);
4841 __ j(kNotEqual, slow_path->GetEntryLabel());
4842 __ Bind(&do_put);
4843 } else {
4844 __ j(kNotEqual, slow_path->GetEntryLabel());
4845 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004846 }
4847 }
4848
4849 if (kPoisonHeapReferences) {
4850 __ movl(temp, register_value);
4851 __ PoisonHeapReference(temp);
4852 __ movl(address, temp);
4853 } else {
4854 __ movl(address, register_value);
4855 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004856 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004857 codegen_->MaybeRecordImplicitNullCheck(instruction);
4858 }
4859
4860 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4861 codegen_->MarkGCCard(
4862 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4863 __ Bind(&done);
4864
4865 if (slow_path != nullptr) {
4866 __ Bind(slow_path->GetExitLabel());
4867 }
4868
4869 break;
4870 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004871
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004872 case Primitive::kPrimInt: {
4873 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4874 Address address = index.IsConstant()
4875 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4876 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4877 if (value.IsRegister()) {
4878 __ movl(address, value.AsRegister<CpuRegister>());
4879 } else {
4880 DCHECK(value.IsConstant()) << value;
4881 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4882 __ movl(address, Immediate(v));
4883 }
4884 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004885 break;
4886 }
4887
4888 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004889 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4890 Address address = index.IsConstant()
4891 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4892 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4893 if (value.IsRegister()) {
4894 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004895 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004896 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004897 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004898 Address address_high = index.IsConstant()
4899 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4900 offset + sizeof(int32_t))
4901 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4902 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004903 }
4904 break;
4905 }
4906
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004907 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004908 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4909 Address address = index.IsConstant()
4910 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4911 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004912 if (value.IsFpuRegister()) {
4913 __ movss(address, value.AsFpuRegister<XmmRegister>());
4914 } else {
4915 DCHECK(value.IsConstant());
4916 int32_t v =
4917 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4918 __ movl(address, Immediate(v));
4919 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004920 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004921 break;
4922 }
4923
4924 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004925 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4926 Address address = index.IsConstant()
4927 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4928 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004929 if (value.IsFpuRegister()) {
4930 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4931 codegen_->MaybeRecordImplicitNullCheck(instruction);
4932 } else {
4933 int64_t v =
4934 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4935 Address address_high = index.IsConstant()
4936 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4937 offset + sizeof(int32_t))
4938 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4939 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4940 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004941 break;
4942 }
4943
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004944 case Primitive::kPrimVoid:
4945 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004946 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004947 }
4948}
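// Rough shape of the type check emitted above for a reference store when no
// read barrier is required (register names illustrative):
//
//   movl(temp, array->klass_)             // class of the array
//   movl(temp, temp->component_type_)     // element type of the array
//   cmpl(temp, register_value->klass_)    // does the value have that type?
//   // equal -> do the store; otherwise, if the static type is Object[], the
//   // store is still fine when the component type's super class is null
//   // (i.e. the component type is Object); every other case jumps to the
//   // ArraySet slow path.
//
// The store is then followed by MarkGCCard, since needs_write_barrier holds
// for any non-null reference store.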
4949
4950void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004951 LocationSummary* locations =
4952 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004953 locations->SetInAt(0, Location::RequiresRegister());
4954 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004955}
4956
4957void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
4958 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01004959 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00004960 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
4961 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004962 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004963 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004964}
4965
4966void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004967 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4968 ? LocationSummary::kCallOnSlowPath
4969 : LocationSummary::kNoCall;
4970 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05004971 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04004972 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004973 if (instruction->HasUses()) {
4974 locations->SetOut(Location::SameAsFirstInput());
4975 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004976}
4977
4978void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
4979 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05004980 Location index_loc = locations->InAt(0);
4981 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07004982 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004983 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004984
Mark Mendell99dbd682015-04-22 16:18:52 -04004985 if (length_loc.IsConstant()) {
4986 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
4987 if (index_loc.IsConstant()) {
4988       // BCE will remove the bounds check if we are guaranteed to pass.
4989 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
4990 if (index < 0 || index >= length) {
4991 codegen_->AddSlowPath(slow_path);
4992 __ jmp(slow_path->GetEntryLabel());
4993 } else {
4994       // Some optimization after BCE may have left this check in place even though
4995       // the constant index is known to be in range; no bounds check is needed.
4996 }
4997 return;
4998 }
4999
5000     // The length is the constant operand here, so the compare is index vs. length
5001 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5002 __ cmpl(index_reg, Immediate(length));
5003 codegen_->AddSlowPath(slow_path);
5004 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005005 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04005006 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5007 if (index_loc.IsConstant()) {
5008 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5009 __ cmpl(length, Immediate(value));
5010 } else {
5011 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5012 }
5013 codegen_->AddSlowPath(slow_path);
5014 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005015 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005016}
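// Note on the conditions above: kAboveEqual/kBelowEqual are unsigned
// comparisons, so a single compare covers both failure modes. Roughly:
//
//   cmpl(index, Immediate(length)); j(kAboveEqual, slow_path)
//     // a negative index wraps to a large unsigned value and also fails
//
// When the length is in a register the compare operands are swapped, which is
// why the condition flips to kBelowEqual (length <= index, unsigned).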
5017
5018void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5019 CpuRegister card,
5020 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005021 CpuRegister value,
5022 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005023 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005024 if (value_can_be_null) {
5025 __ testl(value, value);
5026 __ j(kEqual, &is_null);
5027 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005028 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5029 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005030 __ movq(temp, object);
5031 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005032 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005033 if (value_can_be_null) {
5034 __ Bind(&is_null);
5035 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005036}
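// What MarkGCCard computes, approximately:
//
//   card = Thread::Current()->card_table_                  // read via %gs
//   temp = object >> gc::accounting::CardTable::kCardShift
//   *(card + temp) = low byte of card                      // movb marks the card dirty
//
// This relies on the card-table base being biased so that its low byte equals
// the dirty value, avoiding a separate immediate; the test at the top skips
// the sequence entirely when value_can_be_null and the value is in fact null.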
5037
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005038void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005039 LOG(FATAL) << "Unimplemented";
5040}
5041
5042void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005043 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5044}
5045
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005046void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5047 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5048}
5049
5050void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005051 HBasicBlock* block = instruction->GetBlock();
5052 if (block->GetLoopInformation() != nullptr) {
5053 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5054 // The back edge will generate the suspend check.
5055 return;
5056 }
5057 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5058 // The goto will generate the suspend check.
5059 return;
5060 }
5061 GenerateSuspendCheck(instruction, nullptr);
5062}
5063
5064void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5065 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005066 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005067 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5068 if (slow_path == nullptr) {
5069 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5070 instruction->SetSlowPath(slow_path);
5071 codegen_->AddSlowPath(slow_path);
5072 if (successor != nullptr) {
5073 DCHECK(successor->IsLoopHeader());
5074 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5075 }
5076 } else {
5077 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5078 }
5079
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005080 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5081 /* no_rip */ true),
5082 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005083 if (successor == nullptr) {
5084 __ j(kNotEqual, slow_path->GetEntryLabel());
5085 __ Bind(slow_path->GetReturnLabel());
5086 } else {
5087 __ j(kEqual, codegen_->GetLabelOf(successor));
5088 __ jmp(slow_path->GetEntryLabel());
5089 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005090}
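// The suspend check itself is a 16-bit compare of the thread's flags word,
// read through %gs (the thread-local block on x86-64):
//
//   gs:cmpw(Thread::ThreadFlagsOffset, 0)
//   flags == 0 -> nothing pending: fall through, or jump straight to the
//                 successor when this check sits on a back edge
//   flags != 0 -> take the SuspendCheck slow path so the runtime can service
//                 the pending suspension or checkpoint request.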
5091
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005092X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5093 return codegen_->GetAssembler();
5094}
5095
5096void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005097 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005098 Location source = move->GetSource();
5099 Location destination = move->GetDestination();
5100
5101 if (source.IsRegister()) {
5102 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005103 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005104 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005105 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005106 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005107 } else {
5108 DCHECK(destination.IsDoubleStackSlot());
5109 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005110 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005111 }
5112 } else if (source.IsStackSlot()) {
5113 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005114 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005115 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005116 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005117 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005118 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005119 } else {
5120 DCHECK(destination.IsStackSlot());
5121 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5122 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5123 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005124 } else if (source.IsDoubleStackSlot()) {
5125 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005126 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005127 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005128 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005129 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5130 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005131 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005132 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005133 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5134 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5135 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005136 } else if (source.IsConstant()) {
5137 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005138 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5139 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005140 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005141 if (value == 0) {
5142 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5143 } else {
5144 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5145 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005146 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005147 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005148 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005149 }
5150 } else if (constant->IsLongConstant()) {
5151 int64_t value = constant->AsLongConstant()->GetValue();
5152 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005153 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005154 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005155 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005156 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005157 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005158 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005159 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005160 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005161 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005162 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005163 } else {
5164 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005165 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005166 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5167 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005168 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005169 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005170 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005171 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005172 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005173 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005174 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005175 } else {
5176 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005177 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005178 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005179 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005180 } else if (source.IsFpuRegister()) {
5181 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005182 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005183 } else if (destination.IsStackSlot()) {
5184 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005185 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005186 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005187 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005188 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005189 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005190 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005191 }
5192}
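// Conventions worth noting in EmitMove above:
//  - TMP is the dedicated scratch register, so memory-to-memory moves take
//    two instructions through TMP rather than one.
//  - 32-bit slots (int/float/reference) use movl/movss, 64-bit slots
//    (long/double) use movq/movsd.
//  - Constants are materialized directly: zero via xorl, other 32-bit values
//    via an immediate, 64-bit values via Load64BitValue or
//    Store64BitValueToStack.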
5193
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005194void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005195 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005196 __ movl(Address(CpuRegister(RSP), mem), reg);
5197 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005198}
5199
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005200void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005201 ScratchRegisterScope ensure_scratch(
5202 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5203
5204 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5205 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5206 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5207 Address(CpuRegister(RSP), mem2 + stack_offset));
5208 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5209 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5210 CpuRegister(ensure_scratch.GetRegister()));
5211}
5212
Mark Mendell8a1c7282015-06-29 15:41:28 -04005213void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5214 __ movq(CpuRegister(TMP), reg1);
5215 __ movq(reg1, reg2);
5216 __ movq(reg2, CpuRegister(TMP));
5217}
5218
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005219void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5220 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5221 __ movq(Address(CpuRegister(RSP), mem), reg);
5222 __ movq(reg, CpuRegister(TMP));
5223}
5224
5225void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5226 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005227 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005228
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005229 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5230 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5231 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5232 Address(CpuRegister(RSP), mem2 + stack_offset));
5233 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5234 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5235 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005236}
5237
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005238void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5239 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5240 __ movss(Address(CpuRegister(RSP), mem), reg);
5241 __ movd(reg, CpuRegister(TMP));
5242}
5243
5244void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5245 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5246 __ movsd(Address(CpuRegister(RSP), mem), reg);
5247 __ movd(reg, CpuRegister(TMP));
5248}
5249
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005250void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005251 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005252 Location source = move->GetSource();
5253 Location destination = move->GetDestination();
5254
5255 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005256 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005257 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005258 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005259 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005260 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005261 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005262 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5263 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005264 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005265 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005266 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005267 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5268 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005269 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005270 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5271 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5272 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005273 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005274 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005275 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005276 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005277 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005278 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005279 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005280 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005281 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005282 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005283 }
5284}
5285
5286
5287void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5288 __ pushq(CpuRegister(reg));
5289}
5290
5291
5292void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5293 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005294}
5295
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005296void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005297 SlowPathCode* slow_path, CpuRegister class_reg) {
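  // The status field is compared as a signed value, so both not-yet-initialized and erroneous
  // classes (negative statuses) fall below kStatusInitialized and take the slow path.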
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005298 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5299 Immediate(mirror::Class::kStatusInitialized));
5300 __ j(kLess, slow_path->GetEntryLabel());
5301 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005302 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005303}
5304
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005305void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01005306 InvokeRuntimeCallingConvention calling_convention;
5307 CodeGenerator::CreateLoadClassLocationSummary(
5308 cls,
5309 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Roland Levillain0d5a2812015-11-13 10:07:31 +00005310 Location::RegisterLocation(RAX),
5311 /* code_generator_supports_read_barrier */ true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005312}
5313
5314void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005315 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005316 if (cls->NeedsAccessCheck()) {
5317 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5318 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5319 cls,
5320 cls->GetDexPc(),
5321 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005322 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005323 return;
5324 }
5325
Roland Levillain0d5a2812015-11-13 10:07:31 +00005326 Location out_loc = locations->Out();
5327 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Calin Juravle580b6092015-10-06 17:35:58 +01005328 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005329
Calin Juravle580b6092015-10-06 17:35:58 +01005330 if (cls->IsReferrersClass()) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005331 DCHECK(!cls->CanCallRuntime());
5332 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005333 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5334 GenerateGcRootFieldLoad(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005335 cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005336 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005337 // /* GcRoot<mirror::Class>[] */ out =
5338 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5339 __ movq(out, Address(current_method,
5340 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005341 // /* GcRoot<mirror::Class> */ out = out[type_index]
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005342 GenerateGcRootFieldLoad(
5343 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Roland Levillain4d027112015-07-01 15:41:14 +01005344
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005345 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
5346 DCHECK(cls->CanCallRuntime());
5347 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5348 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5349 codegen_->AddSlowPath(slow_path);
5350 if (!cls->IsInDexCache()) {
5351 __ testl(out, out);
5352 __ j(kEqual, slow_path->GetEntryLabel());
5353 }
5354 if (cls->MustGenerateClinitCheck()) {
5355 GenerateClassInitializationCheck(slow_path, out);
5356 } else {
5357 __ Bind(slow_path->GetExitLabel());
5358 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005359 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005360 }
5361}
5362
5363void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5364 LocationSummary* locations =
5365 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5366 locations->SetInAt(0, Location::RequiresRegister());
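  // When the check has uses, it also acts as a definition of the class it checked, so keep the
  // result in the same register as the input.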
5367 if (check->HasUses()) {
5368 locations->SetOut(Location::SameAsFirstInput());
5369 }
5370}
5371
5372void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005373  // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005374 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005375 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005376 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005377 GenerateClassInitializationCheck(slow_path,
5378 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005379}
5380
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005381HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5382 HLoadString::LoadKind desired_string_load_kind) {
5383 if (kEmitCompilerReadBarrier) {
5384 switch (desired_string_load_kind) {
5385 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5386 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5387 case HLoadString::LoadKind::kBootImageAddress:
5388 // TODO: Implement for read barrier.
5389 return HLoadString::LoadKind::kDexCacheViaMethod;
5390 default:
5391 break;
5392 }
5393 }
5394 switch (desired_string_load_kind) {
5395 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5396 DCHECK(!GetCompilerOptions().GetCompilePic());
5397 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5398 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5399 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5400 DCHECK(GetCompilerOptions().GetCompilePic());
5401 break;
5402 case HLoadString::LoadKind::kBootImageAddress:
5403 break;
5404 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005405 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005406 break;
5407 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005408 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005409 break;
5410 case HLoadString::LoadKind::kDexCacheViaMethod:
5411 break;
5412 }
5413 return desired_string_load_kind;
5414}
5415
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005416void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005417 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005418 ? LocationSummary::kCallOnSlowPath
5419 : LocationSummary::kNoCall;
5420 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005421 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5422 locations->SetInAt(0, Location::RequiresRegister());
5423 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005424 locations->SetOut(Location::RequiresRegister());
5425}
5426
5427void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005428 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005429 Location out_loc = locations->Out();
5430 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005431
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005432 switch (load->GetLoadKind()) {
5433 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
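      // The displacement in the leal below is a placeholder; RecordStringPatch remembers this
      // site so the PC-relative address of the boot image string can be patched in later.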
5434 DCHECK(!kEmitCompilerReadBarrier);
5435 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5436 codegen_->RecordStringPatch(load);
5437 return; // No dex cache slow path.
5438 }
5439 case HLoadString::LoadKind::kBootImageAddress: {
5440 DCHECK(!kEmitCompilerReadBarrier);
5441 DCHECK_NE(load->GetAddress(), 0u);
5442 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5443 __ movl(out, Immediate(address)); // Zero-extended.
5444 codegen_->RecordSimplePatch();
5445 return; // No dex cache slow path.
5446 }
5447 case HLoadString::LoadKind::kDexCacheAddress: {
5448 DCHECK_NE(load->GetAddress(), 0u);
5449 if (IsUint<32>(load->GetAddress())) {
5450 Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
5451 GenerateGcRootFieldLoad(load, out_loc, address);
5452 } else {
5453 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5454 __ movq(out, Immediate(load->GetAddress()));
5455 GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
5456 }
5457 break;
5458 }
5459 case HLoadString::LoadKind::kDexCachePcRelative: {
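      // Similarly, the displacement below is a dummy; the fixup label marks the load so the
      // PC-relative dex cache array element offset can be patched in once it is known.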
5460 uint32_t offset = load->GetDexCacheElementOffset();
5461 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
5462 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5463 /* no_rip */ false);
5464 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
5465 break;
5466 }
5467 case HLoadString::LoadKind::kDexCacheViaMethod: {
5468 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5469
5470 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5471 GenerateGcRootFieldLoad(
5472 load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5473 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5474 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
5475 // /* GcRoot<mirror::String> */ out = out[string_index]
5476 GenerateGcRootFieldLoad(
5477 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
5478 break;
5479 }
5480 default:
5481 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
5482 UNREACHABLE();
5483 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005484
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005485 if (!load->IsInDexCache()) {
5486 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5487 codegen_->AddSlowPath(slow_path);
5488 __ testl(out, out);
5489 __ j(kEqual, slow_path->GetEntryLabel());
5490 __ Bind(slow_path->GetExitLabel());
5491 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005492}
5493
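// Address of the current thread's pending exception. Callers read it through the GS segment,
// which on x86-64 points at the Thread object holding ART's thread-local state.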
David Brazdilcb1c0552015-08-04 16:22:25 +01005494static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005495 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5496 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005497}
5498
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005499void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5500 LocationSummary* locations =
5501 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5502 locations->SetOut(Location::RequiresRegister());
5503}
5504
5505void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005506 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5507}
5508
5509void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5510 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5511}
5512
5513void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5514 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005515}
5516
5517void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5518 LocationSummary* locations =
5519 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5520 InvokeRuntimeCallingConvention calling_convention;
5521 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5522}
5523
5524void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005525 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5526 instruction,
5527 instruction->GetDexPc(),
5528 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005529 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005530}
5531
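// A type check needs an extra temporary register when read barriers are in use: Baker's fast
// path needs a scratch register for the lock word, and the slow-path barrier needs one to
// preserve the reference that the loop-based checks overwrite.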
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005532static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5533 return kEmitCompilerReadBarrier &&
5534 (kUseBakerReadBarrier ||
5535 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5536 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5537 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5538}
5539
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005540void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005541 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005542 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5543 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005544 case TypeCheckKind::kExactCheck:
5545 case TypeCheckKind::kAbstractClassCheck:
5546 case TypeCheckKind::kClassHierarchyCheck:
5547 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005548 call_kind =
5549 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005550 break;
5551 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005552 case TypeCheckKind::kUnresolvedCheck:
5553 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005554 call_kind = LocationSummary::kCallOnSlowPath;
5555 break;
5556 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005557
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005558 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005559 locations->SetInAt(0, Location::RequiresRegister());
5560 locations->SetInAt(1, Location::Any());
5561 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5562 locations->SetOut(Location::RequiresRegister());
5563 // When read barriers are enabled, we need a temporary register for
5564 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005565 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005566 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005567 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005568}
5569
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005570void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005571 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005572 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005573 Location obj_loc = locations->InAt(0);
5574 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005575 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005576 Location out_loc = locations->Out();
5577 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005578 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005579 locations->GetTemp(0) :
5580 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005581 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005582 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5583 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5584 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005585 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005586 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005587
5588 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005589 // Avoid null check if we know obj is not null.
5590 if (instruction->MustDoNullCheck()) {
5591 __ testl(obj, obj);
5592 __ j(kEqual, &zero);
5593 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005594
Roland Levillain0d5a2812015-11-13 10:07:31 +00005595 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005596 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005597
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005598 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005599 case TypeCheckKind::kExactCheck: {
5600 if (cls.IsRegister()) {
5601 __ cmpl(out, cls.AsRegister<CpuRegister>());
5602 } else {
5603 DCHECK(cls.IsStackSlot()) << cls;
5604 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5605 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005606 if (zero.IsLinked()) {
5607 // Classes must be equal for the instanceof to succeed.
5608 __ j(kNotEqual, &zero);
5609 __ movl(out, Immediate(1));
5610 __ jmp(&done);
5611 } else {
5612 __ setcc(kEqual, out);
5613 // setcc only sets the low byte.
5614 __ andl(out, Immediate(1));
5615 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005616 break;
5617 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005618
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005619 case TypeCheckKind::kAbstractClassCheck: {
5620 // If the class is abstract, we eagerly fetch the super class of the
5621 // object to avoid doing a comparison we know will fail.
5622 NearLabel loop, success;
5623 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005624 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005625 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005626 __ testl(out, out);
5627 // If `out` is null, we use it for the result, and jump to `done`.
5628 __ j(kEqual, &done);
5629 if (cls.IsRegister()) {
5630 __ cmpl(out, cls.AsRegister<CpuRegister>());
5631 } else {
5632 DCHECK(cls.IsStackSlot()) << cls;
5633 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5634 }
5635 __ j(kNotEqual, &loop);
5636 __ movl(out, Immediate(1));
5637 if (zero.IsLinked()) {
5638 __ jmp(&done);
5639 }
5640 break;
5641 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005642
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005643 case TypeCheckKind::kClassHierarchyCheck: {
5644 // Walk over the class hierarchy to find a match.
5645 NearLabel loop, success;
5646 __ Bind(&loop);
5647 if (cls.IsRegister()) {
5648 __ cmpl(out, cls.AsRegister<CpuRegister>());
5649 } else {
5650 DCHECK(cls.IsStackSlot()) << cls;
5651 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5652 }
5653 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005654 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005655 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005656 __ testl(out, out);
5657 __ j(kNotEqual, &loop);
5658 // If `out` is null, we use it for the result, and jump to `done`.
5659 __ jmp(&done);
5660 __ Bind(&success);
5661 __ movl(out, Immediate(1));
5662 if (zero.IsLinked()) {
5663 __ jmp(&done);
5664 }
5665 break;
5666 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005667
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005668 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005669 // Do an exact check.
5670 NearLabel exact_check;
5671 if (cls.IsRegister()) {
5672 __ cmpl(out, cls.AsRegister<CpuRegister>());
5673 } else {
5674 DCHECK(cls.IsStackSlot()) << cls;
5675 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5676 }
5677 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005678 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005679 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005680 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005681 __ testl(out, out);
5682 // If `out` is null, we use it for the result, and jump to `done`.
5683 __ j(kEqual, &done);
5684 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5685 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005686 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005687 __ movl(out, Immediate(1));
5688 __ jmp(&done);
5689 break;
5690 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005691
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005692 case TypeCheckKind::kArrayCheck: {
5693 if (cls.IsRegister()) {
5694 __ cmpl(out, cls.AsRegister<CpuRegister>());
5695 } else {
5696 DCHECK(cls.IsStackSlot()) << cls;
5697 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5698 }
5699 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005700 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5701 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005702 codegen_->AddSlowPath(slow_path);
5703 __ j(kNotEqual, slow_path->GetEntryLabel());
5704 __ movl(out, Immediate(1));
5705 if (zero.IsLinked()) {
5706 __ jmp(&done);
5707 }
5708 break;
5709 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005710
Calin Juravle98893e12015-10-02 21:05:03 +01005711 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005712 case TypeCheckKind::kInterfaceCheck: {
5713 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005714 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005715 // cases.
5716 //
5717 // We cannot directly call the InstanceofNonTrivial runtime
5718 // entry point without resorting to a type checking slow path
5719 // here (i.e. by calling InvokeRuntime directly), as it would
5720      // require assigning fixed registers for the inputs of this
5721 // HInstanceOf instruction (following the runtime calling
5722 // convention), which might be cluttered by the potential first
5723 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005724 //
5725 // TODO: Introduce a new runtime entry point taking the object
5726 // to test (instead of its class) as argument, and let it deal
5727 // with the read barrier issues. This will let us refactor this
5728 // case of the `switch` code as it was previously (with a direct
5729 // call to the runtime not using a type checking slow path).
5730 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005731 DCHECK(locations->OnlyCallsOnSlowPath());
5732 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5733 /* is_fatal */ false);
5734 codegen_->AddSlowPath(slow_path);
5735 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005736 if (zero.IsLinked()) {
5737 __ jmp(&done);
5738 }
5739 break;
5740 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005741 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005742
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005743 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005744 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005745 __ xorl(out, out);
5746 }
5747
5748 if (done.IsLinked()) {
5749 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005750 }
5751
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005752 if (slow_path != nullptr) {
5753 __ Bind(slow_path->GetExitLabel());
5754 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005755}
5756
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005757void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005758 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5759 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005760 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5761 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005762 case TypeCheckKind::kExactCheck:
5763 case TypeCheckKind::kAbstractClassCheck:
5764 case TypeCheckKind::kClassHierarchyCheck:
5765 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005766 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5767 LocationSummary::kCallOnSlowPath :
5768 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005769 break;
5770 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005771 case TypeCheckKind::kUnresolvedCheck:
5772 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005773 call_kind = LocationSummary::kCallOnSlowPath;
5774 break;
5775 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005776 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5777 locations->SetInAt(0, Location::RequiresRegister());
5778 locations->SetInAt(1, Location::Any());
5779 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5780 locations->AddTemp(Location::RequiresRegister());
5781 // When read barriers are enabled, we need an additional temporary
5782 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005783 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005784 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005785 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005786}
5787
5788void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005789 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005790 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005791 Location obj_loc = locations->InAt(0);
5792 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005793 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005794 Location temp_loc = locations->GetTemp(0);
5795 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005796 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005797 locations->GetTemp(1) :
5798 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005799 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5800 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5801 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5802 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005803
Roland Levillain0d5a2812015-11-13 10:07:31 +00005804 bool is_type_check_slow_path_fatal =
5805 (type_check_kind == TypeCheckKind::kExactCheck ||
5806 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5807 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5808 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5809 !instruction->CanThrowIntoCatchBlock();
5810 SlowPathCode* type_check_slow_path =
5811 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5812 is_type_check_slow_path_fatal);
5813 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005814
Roland Levillain0d5a2812015-11-13 10:07:31 +00005815 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005816 case TypeCheckKind::kExactCheck:
5817 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005818 NearLabel done;
5819 // Avoid null check if we know obj is not null.
5820 if (instruction->MustDoNullCheck()) {
5821 __ testl(obj, obj);
5822 __ j(kEqual, &done);
5823 }
5824
5825 // /* HeapReference<Class> */ temp = obj->klass_
5826 GenerateReferenceLoadTwoRegisters(
5827 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5828
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005829 if (cls.IsRegister()) {
5830 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5831 } else {
5832 DCHECK(cls.IsStackSlot()) << cls;
5833 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5834 }
5835 // Jump to slow path for throwing the exception or doing a
5836 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005837 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005838 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005839 break;
5840 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005841
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005842 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005843 NearLabel done;
5844 // Avoid null check if we know obj is not null.
5845 if (instruction->MustDoNullCheck()) {
5846 __ testl(obj, obj);
5847 __ j(kEqual, &done);
5848 }
5849
5850 // /* HeapReference<Class> */ temp = obj->klass_
5851 GenerateReferenceLoadTwoRegisters(
5852 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5853
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005854 // If the class is abstract, we eagerly fetch the super class of the
5855 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005856 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005857 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005858 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005859 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005860
5861 // If the class reference currently in `temp` is not null, jump
5862 // to the `compare_classes` label to compare it with the checked
5863 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005864 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005865 __ j(kNotEqual, &compare_classes);
5866 // Otherwise, jump to the slow path to throw the exception.
5867 //
5868 // But before, move back the object's class into `temp` before
5869 // going into the slow path, as it has been overwritten in the
5870 // meantime.
5871 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005872 GenerateReferenceLoadTwoRegisters(
5873 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005874 __ jmp(type_check_slow_path->GetEntryLabel());
5875
5876 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005877 if (cls.IsRegister()) {
5878 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5879 } else {
5880 DCHECK(cls.IsStackSlot()) << cls;
5881 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5882 }
5883 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00005884 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005885 break;
5886 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005887
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005888 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005889 NearLabel done;
5890 // Avoid null check if we know obj is not null.
5891 if (instruction->MustDoNullCheck()) {
5892 __ testl(obj, obj);
5893 __ j(kEqual, &done);
5894 }
5895
5896 // /* HeapReference<Class> */ temp = obj->klass_
5897 GenerateReferenceLoadTwoRegisters(
5898 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5899
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005900 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005901 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005902 __ Bind(&loop);
5903 if (cls.IsRegister()) {
5904 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5905 } else {
5906 DCHECK(cls.IsStackSlot()) << cls;
5907 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5908 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005909 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005910
Roland Levillain0d5a2812015-11-13 10:07:31 +00005911 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005912 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005913
5914 // If the class reference currently in `temp` is not null, jump
5915      // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005916 __ testl(temp, temp);
5917 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005918 // Otherwise, jump to the slow path to throw the exception.
5919 //
5920 // But before, move back the object's class into `temp` before
5921 // going into the slow path, as it has been overwritten in the
5922 // meantime.
5923 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005924 GenerateReferenceLoadTwoRegisters(
5925 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005926 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005927 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005928 break;
5929 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005930
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005931 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005932 // We cannot use a NearLabel here, as its range might be too
5933 // short in some cases when read barriers are enabled. This has
5934 // been observed for instance when the code emitted for this
5935 // case uses high x86-64 registers (R8-R15).
5936 Label done;
5937 // Avoid null check if we know obj is not null.
5938 if (instruction->MustDoNullCheck()) {
5939 __ testl(obj, obj);
5940 __ j(kEqual, &done);
5941 }
5942
5943 // /* HeapReference<Class> */ temp = obj->klass_
5944 GenerateReferenceLoadTwoRegisters(
5945 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5946
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005947 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005948 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005949 if (cls.IsRegister()) {
5950 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5951 } else {
5952 DCHECK(cls.IsStackSlot()) << cls;
5953 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5954 }
5955 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005956
5957 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005958 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005959 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005960
5961 // If the component type is not null (i.e. the object is indeed
5962 // an array), jump to label `check_non_primitive_component_type`
5963 // to further check that this component type is not a primitive
5964 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005965 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005966 __ j(kNotEqual, &check_non_primitive_component_type);
5967 // Otherwise, jump to the slow path to throw the exception.
5968 //
5969 // But before, move back the object's class into `temp` before
5970 // going into the slow path, as it has been overwritten in the
5971 // meantime.
5972 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005973 GenerateReferenceLoadTwoRegisters(
5974 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005975 __ jmp(type_check_slow_path->GetEntryLabel());
5976
5977 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005978 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005979 __ j(kEqual, &done);
5980 // Same comment as above regarding `temp` and the slow path.
5981 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005982 GenerateReferenceLoadTwoRegisters(
5983 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005984 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005985 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005986 break;
5987 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005988
Calin Juravle98893e12015-10-02 21:05:03 +01005989 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005990 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00005991 NearLabel done;
5992 // Avoid null check if we know obj is not null.
5993 if (instruction->MustDoNullCheck()) {
5994 __ testl(obj, obj);
5995 __ j(kEqual, &done);
5996 }
5997
5998 // /* HeapReference<Class> */ temp = obj->klass_
5999 GenerateReferenceLoadTwoRegisters(
6000 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6001
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006002 // We always go into the type check slow path for the unresolved
6003 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006004 //
6005 // We cannot directly call the CheckCast runtime entry point
6006 // without resorting to a type checking slow path here (i.e. by
6007 // calling InvokeRuntime directly), as it would require to
6008 // assign fixed registers for the inputs of this HInstanceOf
6009 // instruction (following the runtime calling convention), which
6010 // might be cluttered by the potential first read barrier
6011 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006012 //
6013 // TODO: Introduce a new runtime entry point taking the object
6014 // to test (instead of its class) as argument, and let it deal
6015 // with the read barrier issues. This will let us refactor this
6016 // case of the `switch` code as it was previously (with a direct
6017 // call to the runtime not using a type checking slow path).
6018 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006019 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006020 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006021 break;
6022 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006023
Roland Levillain0d5a2812015-11-13 10:07:31 +00006024 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006025}
6026
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006027void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6028 LocationSummary* locations =
6029 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
6030 InvokeRuntimeCallingConvention calling_convention;
6031 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6032}
6033
6034void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01006035 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
6036 : QUICK_ENTRY_POINT(pUnlockObject),
6037 instruction,
6038 instruction->GetDexPc(),
6039 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006040 if (instruction->IsEnter()) {
6041 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6042 } else {
6043 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6044 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006045}
6046
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006047void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6048void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6049void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6050
6051void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6052 LocationSummary* locations =
6053 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6054 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6055 || instruction->GetResultType() == Primitive::kPrimLong);
6056 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006057 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006058 locations->SetOut(Location::SameAsFirstInput());
6059}
6060
6061void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6062 HandleBitwiseOperation(instruction);
6063}
6064
6065void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6066 HandleBitwiseOperation(instruction);
6067}
6068
6069void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6070 HandleBitwiseOperation(instruction);
6071}
6072
6073void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6074 LocationSummary* locations = instruction->GetLocations();
6075 Location first = locations->InAt(0);
6076 Location second = locations->InAt(1);
6077 DCHECK(first.Equals(locations->Out()));
6078
6079 if (instruction->GetResultType() == Primitive::kPrimInt) {
6080 if (second.IsRegister()) {
6081 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006082 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006083 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006084 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006085 } else {
6086 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006087 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006088 }
6089 } else if (second.IsConstant()) {
6090 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6091 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006092 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006093 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006094 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006095 } else {
6096 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006097 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006098 }
6099 } else {
6100 Address address(CpuRegister(RSP), second.GetStackIndex());
6101 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006102 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006103 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006104 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006105 } else {
6106 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006107 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006108 }
6109 }
6110 } else {
6111 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006112 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6113 bool second_is_constant = false;
6114 int64_t value = 0;
6115 if (second.IsConstant()) {
6116 second_is_constant = true;
6117 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006118 }
Mark Mendell40741f32015-04-20 22:10:34 -04006119 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006120
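    // The 64-bit forms of and/or/xor only encode a sign-extended 32-bit immediate; wider
    // constants are kept in the constant area and read from memory instead.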
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006121 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006122 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006123 if (is_int32_value) {
6124 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6125 } else {
6126 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6127 }
6128 } else if (second.IsDoubleStackSlot()) {
6129 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006130 } else {
6131 __ andq(first_reg, second.AsRegister<CpuRegister>());
6132 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006133 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006134 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006135 if (is_int32_value) {
6136 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6137 } else {
6138 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6139 }
6140 } else if (second.IsDoubleStackSlot()) {
6141 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006142 } else {
6143 __ orq(first_reg, second.AsRegister<CpuRegister>());
6144 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006145 } else {
6146 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006147 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006148 if (is_int32_value) {
6149 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6150 } else {
6151 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6152 }
6153 } else if (second.IsDoubleStackSlot()) {
6154 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006155 } else {
6156 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6157 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006158 }
6159 }
6160}
6161
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006162void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6163 Location out,
6164 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006165 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006166 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6167 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006168 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006169 if (kUseBakerReadBarrier) {
6170 // Load with fast path based Baker's read barrier.
6171 // /* HeapReference<Object> */ out = *(out + offset)
6172 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006173 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006174 } else {
6175 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006176 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006177 // in the following move operation, as we will need it for the
6178 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006179 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006180 // /* HeapReference<Object> */ out = *(out + offset)
6181 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006182 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006183 }
6184 } else {
6185 // Plain load with no read barrier.
6186 // /* HeapReference<Object> */ out = *(out + offset)
6187 __ movl(out_reg, Address(out_reg, offset));
6188 __ MaybeUnpoisonHeapReference(out_reg);
6189 }
6190}
6191
6192void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6193 Location out,
6194 Location obj,
6195 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006196 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006197 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6198 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6199 if (kEmitCompilerReadBarrier) {
6200 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006201 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006202 // Load with fast path based Baker's read barrier.
6203 // /* HeapReference<Object> */ out = *(obj + offset)
6204 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006205 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006206 } else {
6207 // Load with slow path based read barrier.
6208 // /* HeapReference<Object> */ out = *(obj + offset)
6209 __ movl(out_reg, Address(obj_reg, offset));
6210 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6211 }
6212 } else {
6213 // Plain load with no read barrier.
6214 // /* HeapReference<Object> */ out = *(obj + offset)
6215 __ movl(out_reg, Address(obj_reg, offset));
6216 __ MaybeUnpoisonHeapReference(out_reg);
6217 }
6218}
6219
6220void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6221 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006222 const Address& address,
6223 Label* fixup_label) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006224 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6225 if (kEmitCompilerReadBarrier) {
6226 if (kUseBakerReadBarrier) {
6227 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6228    // Baker's read barriers are used:
6229 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006230 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006231 // if (Thread::Current()->GetIsGcMarking()) {
6232 // root = ReadBarrier::Mark(root)
6233 // }
6234
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006235 // /* GcRoot<mirror::Object> */ root = *address
6236 __ movl(root_reg, address);
6237 if (fixup_label != nullptr) {
6238 __ Bind(fixup_label);
6239 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006240 static_assert(
6241 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6242 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6243 "have different sizes.");
6244 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6245 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6246 "have different sizes.");
6247
6248 // Slow path used to mark the GC root `root`.
6249 SlowPathCode* slow_path =
6250 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
6251 codegen_->AddSlowPath(slow_path);
6252
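      // The mark entrypoint is only needed while the concurrent GC is in its marking phase;
      // test the thread-local is_gc_marking flag (reached through GS) before taking the slow path.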
6253 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6254 /* no_rip */ true),
6255 Immediate(0));
6256 __ j(kNotEqual, slow_path->GetEntryLabel());
6257 __ Bind(slow_path->GetExitLabel());
6258 } else {
6259 // GC root loaded through a slow path for read barriers other
6260 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006261 // /* GcRoot<mirror::Object>* */ root = address
6262 __ leaq(root_reg, address);
6263 if (fixup_label != nullptr) {
6264 __ Bind(fixup_label);
6265 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006266 // /* mirror::Object* */ root = root->Read()
6267 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6268 }
6269 } else {
6270 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006271 // /* GcRoot<mirror::Object> */ root = *address
6272 __ movl(root_reg, address);
6273 if (fixup_label != nullptr) {
6274 __ Bind(fixup_label);
6275 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006276 // Note that GC roots are not affected by heap poisoning, thus we
6277 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006278 }
6279}
6280
6281void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6282 Location ref,
6283 CpuRegister obj,
6284 uint32_t offset,
6285 Location temp,
6286 bool needs_null_check) {
6287 DCHECK(kEmitCompilerReadBarrier);
6288 DCHECK(kUseBakerReadBarrier);
6289
6290 // /* HeapReference<Object> */ ref = *(obj + offset)
6291 Address src(obj, offset);
6292 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6293}
6294
6295void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6296 Location ref,
6297 CpuRegister obj,
6298 uint32_t data_offset,
6299 Location index,
6300 Location temp,
6301 bool needs_null_check) {
6302 DCHECK(kEmitCompilerReadBarrier);
6303 DCHECK(kUseBakerReadBarrier);
6304
6305 // /* HeapReference<Object> */ ref =
6306 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6307 Address src = index.IsConstant() ?
6308 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6309 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6310 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6311}
6312
6313void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6314 Location ref,
6315 CpuRegister obj,
6316 const Address& src,
6317 Location temp,
6318 bool needs_null_check) {
6319 DCHECK(kEmitCompilerReadBarrier);
6320 DCHECK(kUseBakerReadBarrier);
6321
6322 // In slow path based read barriers, the read barrier call is
6323 // inserted after the original load. However, in fast path based
6324 // Baker's read barriers, we need to perform the load of
6325 // mirror::Object::monitor_ *before* the original reference load.
6326 // This load-load ordering is required by the read barrier.
6327 // The fast path/slow path (for Baker's algorithm) should look like:
6328 //
6329  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
6330 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6331 // HeapReference<Object> ref = *src; // Original reference load.
6332 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6333 // if (is_gray) {
6334 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6335 // }
6336 //
6337 // Note: the original implementation in ReadBarrier::Barrier is
6338 // slightly more complex as:
6339 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006340 // the high-bits of rb_state, which are expected to be all zeroes
6341 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6342 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006343 // - it performs additional checks that we do not do here for
6344 // performance reasons.
6345
6346 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6347 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6348 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6349
6350 // /* int32_t */ monitor = obj->monitor_
6351 __ movl(temp_reg, Address(obj, monitor_offset));
6352 if (needs_null_check) {
6353 MaybeRecordImplicitNullCheck(instruction);
6354 }
6355 // /* LockWord */ lock_word = LockWord(monitor)
6356 static_assert(sizeof(LockWord) == sizeof(int32_t),
6357 "art::LockWord and int32_t have different sizes.");
6358 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6359 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6360 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6361 static_assert(
6362 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6363 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6364
6365 // Load fence to prevent load-load reordering.
6366 // Note that this is a no-op, thanks to the x86-64 memory model.
6367 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6368
6369 // The actual reference load.
6370 // /* HeapReference<Object> */ ref = *src
6371 __ movl(ref_reg, src);
6372
6373 // Object* ref = ref_addr->AsMirrorPtr()
6374 __ MaybeUnpoisonHeapReference(ref_reg);
6375
6376 // Slow path used to mark the object `ref` when it is gray.
6377 SlowPathCode* slow_path =
6378 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6379 AddSlowPath(slow_path);
6380
6381 // if (rb_state == ReadBarrier::gray_ptr_)
6382 // ref = ReadBarrier::Mark(ref);
6383 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6384 __ j(kEqual, slow_path->GetEntryLabel());
6385 __ Bind(slow_path->GetExitLabel());
6386}
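// For illustration, with hypothetical register assignments (obj in RDI, ref in RAX,
// temp in RSI), the code emitted above corresponds roughly to:
//
//   movl monitor_offset(%rdi), %esi      // temp = obj->monitor_ (may record the null check)
//   shrl $kReadBarrierStateShift, %esi
//   andl $kReadBarrierStateMask, %esi    // temp = rb_state
//                                        // (no fence instruction: kLoadAny is a no-op on x86-64)
//   movl src, %eax                       // the original reference load
//   cmpl $gray_ptr, %esi
//   je <ReadBarrierMark slow path>       // slow path marks the reference and updates RAX
//   <slow path exit label>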
6387
6388void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6389 Location out,
6390 Location ref,
6391 Location obj,
6392 uint32_t offset,
6393 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006394 DCHECK(kEmitCompilerReadBarrier);
6395
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006396 // Insert a slow path based read barrier *after* the reference load.
6397 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006398 // If heap poisoning is enabled, the unpoisoning of the loaded
6399 // reference will be carried out by the runtime within the slow
6400 // path.
6401 //
6402 // Note that `ref` currently does not get unpoisoned (when heap
6403 // poisoning is enabled), which is alright as the `ref` argument is
6404 // not used by the artReadBarrierSlow entry point.
6405 //
6406 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6407 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6408 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6409 AddSlowPath(slow_path);
6410
Roland Levillain0d5a2812015-11-13 10:07:31 +00006411 __ jmp(slow_path->GetEntryLabel());
6412 __ Bind(slow_path->GetExitLabel());
6413}
6414
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006415void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6416 Location out,
6417 Location ref,
6418 Location obj,
6419 uint32_t offset,
6420 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006421 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006422 // Baker's read barriers shall be handled by the fast path
6423 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6424 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006425 // If heap poisoning is enabled, unpoisoning will be taken care of
6426 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006427 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006428 } else if (kPoisonHeapReferences) {
6429 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6430 }
6431}
6432
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006433void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6434 Location out,
6435 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006436 DCHECK(kEmitCompilerReadBarrier);
6437
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006438 // Insert a slow path based read barrier *after* the GC root load.
6439 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006440 // Note that GC roots are not affected by heap poisoning, so we do
6441 // not need to do anything special for this here.
6442 SlowPathCode* slow_path =
6443 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6444 AddSlowPath(slow_path);
6445
Roland Levillain0d5a2812015-11-13 10:07:31 +00006446 __ jmp(slow_path->GetEntryLabel());
6447 __ Bind(slow_path->GetExitLabel());
6448}
6449
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006450void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006451 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006452 LOG(FATAL) << "Unreachable";
6453}
6454
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006455void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006456 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006457 LOG(FATAL) << "Unreachable";
6458}
6459
Mark Mendellfe57faa2015-09-18 09:26:15 -04006460// Simple implementation of packed switch - generate cascaded compare/jumps.
6461void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6462 LocationSummary* locations =
6463 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6464 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006465 locations->AddTemp(Location::RequiresRegister());
6466 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006467}
6468
6469void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6470 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006471 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006472 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006473 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6474 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6475 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006476 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6477
6478 // Should we generate smaller inline compare/jumps?
6479 if (num_entries <= kPackedSwitchJumpTableThreshold) {
6480 // Figure out the correct compare values and jump conditions.
6481 // Handle the first compare/branch as a special case because it might
6482 // jump to the default case.
6483 DCHECK_GT(num_entries, 2u);
6484 Condition first_condition;
6485 uint32_t index;
6486 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
6487 if (lower_bound != 0) {
6488 first_condition = kLess;
6489 __ cmpl(value_reg_in, Immediate(lower_bound));
6490 __ j(first_condition, codegen_->GetLabelOf(default_block));
6491 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
6492
6493 index = 1;
6494 } else {
6495 // Handle all the compare/jumps below.
6496 first_condition = kBelow;
6497 index = 0;
6498 }
6499
6500 // Handle the rest of the compare/jumps.
6501 for (; index + 1 < num_entries; index += 2) {
6502 int32_t compare_to_value = lower_bound + index + 1;
6503 __ cmpl(value_reg_in, Immediate(compare_to_value));
6504 // Jump to successors[index] if value < case_value[index].
6505 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
6506 // Jump to successors[index + 1] if value == case_value[index + 1].
6507 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
6508 }
6509
6510 if (index != num_entries) {
6511 // There are an odd number of entries. Handle the last one.
6512 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00006513 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006514 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
6515 }
6516
6517 // And the default for any other value.
6518 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6519 __ jmp(codegen_->GetLabelOf(default_block));
6520 }
6521 return;
6522 }
Mark Mendell9c86b482015-09-18 13:36:07 -04006523
6524 // Remove the bias, if needed.
6525 Register value_reg_out = value_reg_in.AsRegister();
6526 if (lower_bound != 0) {
6527 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6528 value_reg_out = temp_reg.AsRegister();
6529 }
6530 CpuRegister value_reg(value_reg_out);
6531
6532 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04006533 __ cmpl(value_reg, Immediate(num_entries - 1));
6534 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006535
Mark Mendell9c86b482015-09-18 13:36:07 -04006536 // We are in the range of the table.
6537 // Load the address of the jump table in the constant area.
6538 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006539
Mark Mendell9c86b482015-09-18 13:36:07 -04006540 // Load the (signed) offset from the jump table.
6541 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6542
6543 // Add the offset to the address of the table base.
6544 __ addq(temp_reg, base_reg);
6545
6546 // And jump.
6547 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006548}
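// As a sketch (register choices are hypothetical), a packed switch over case values
// 100..109 exceeds kPackedSwitchJumpTableThreshold, takes the jump table path, and
// emits roughly:
//
//   leal -100(%esi), %eax           // remove the bias (lower_bound == 100)
//   cmpl $9, %eax                   // num_entries - 1
//   ja <default_block>
//   leaq <jump_table>(%rip), %rcx   // address of the jump table in the constant area
//   movslq (%rcx,%rax,4), %rax      // signed 32-bit offset of the target block
//   addq %rcx, %rax                 // absolute address of the target block
//   jmp *%rax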
6549
Aart Bikc5d47542016-01-27 17:00:35 -08006550void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6551 if (value == 0) {
6552 __ xorl(dest, dest);
6553 } else {
6554 __ movl(dest, Immediate(value));
6555 }
6556}
6557
Mark Mendell92e83bf2015-05-07 11:25:03 -04006558void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6559 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006560 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006561 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006562 } else if (IsUint<32>(value)) {
6563 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006564 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6565 } else {
6566 __ movq(dest, Immediate(value));
6567 }
6568}
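// A few illustrative cases (encodings sketched, not verified assembler output):
//   Load64BitValue(rax, 0)           -> xorl %eax, %eax         (clears the upper bits too)
//   Load64BitValue(rax, 0xFFFFFFFF)  -> movl $0xFFFFFFFF, %eax  (movl zero-extends, shorter)
//   Load64BitValue(rax, -1)          -> movq $-1, %rax          (needs the 64-bit form)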
6569
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006570void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6571 if (value == 0) {
6572 __ xorps(dest, dest);
6573 } else {
6574 __ movss(dest, LiteralInt32Address(value));
6575 }
6576}
6577
6578void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6579 if (value == 0) {
6580 __ xorpd(dest, dest);
6581 } else {
6582 __ movsd(dest, LiteralInt64Address(value));
6583 }
6584}
6585
6586void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6587 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6588}
6589
6590void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6591 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6592}
6593
Aart Bika19616e2016-02-01 18:57:58 -08006594void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6595 if (value == 0) {
6596 __ testl(dest, dest);
6597 } else {
6598 __ cmpl(dest, Immediate(value));
6599 }
6600}
6601
6602void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6603 if (IsInt<32>(value)) {
6604 if (value == 0) {
6605 __ testq(dest, dest);
6606 } else {
6607 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6608 }
6609 } else {
 6610    // Value won't fit in a sign-extended 32-bit immediate.
6611 __ cmpq(dest, LiteralInt64Address(value));
6612 }
6613}
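// Illustrative cases (a sketch):
//   Compare64BitValue(rax, 0)         -> testq %rax, %rax
//   Compare64BitValue(rax, 42)        -> cmpq $42, %rax  (sign-extended imm32)
//   Compare64BitValue(rax, 1LL << 40) -> cmpq of RAX against a 64-bit literal placed in
//                                        the constant area and addressed RIP-relatively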
6614
Mark Mendellcfa410b2015-05-25 16:02:44 -04006615void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6616 DCHECK(dest.IsDoubleStackSlot());
6617 if (IsInt<32>(value)) {
6618 // Can move directly as an int32 constant.
6619 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6620 Immediate(static_cast<int32_t>(value)));
6621 } else {
6622 Load64BitValue(CpuRegister(TMP), value);
6623 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6624 }
6625}
6626
Mark Mendell9c86b482015-09-18 13:36:07 -04006627/**
6628 * Class to handle late fixup of offsets into constant area.
6629 */
6630class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6631 public:
6632 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6633 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6634
6635 protected:
6636 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6637
6638 CodeGeneratorX86_64* codegen_;
6639
6640 private:
6641 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6642 // Patch the correct offset for the instruction. We use the address of the
6643 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6644 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6645 int32_t relative_position = constant_offset - pos;
6646
6647 // Patch in the right value.
6648 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6649 }
6650
6651 // Location in constant area that the fixup refers to.
6652 size_t offset_into_constant_area_;
6653};
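// As a worked example (the numbers are made up): if the constant area starts at code
// offset 0x400, the referenced constant is at offset 0x10 within it, and the instruction
// using the RIP-relative operand ends at position 0x120, then Process() stores
// 0x400 + 0x10 - 0x120 = 0x2F0 into the 4 displacement bytes at [0x11C, 0x120). At run
// time RIP points at the next instruction, so RIP + 0x2F0 addresses the constant.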
6654
6655/**
 6656 * Class to handle late fixup of offsets to a jump table that will be created in the
6657 * constant area.
6658 */
6659class JumpTableRIPFixup : public RIPFixup {
6660 public:
6661 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6662 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6663
6664 void CreateJumpTable() {
6665 X86_64Assembler* assembler = codegen_->GetAssembler();
6666
6667 // Ensure that the reference to the jump table has the correct offset.
6668 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6669 SetOffset(offset_in_constant_table);
6670
6671 // Compute the offset from the start of the function to this jump table.
6672 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6673
6674 // Populate the jump table with the correct values for the jump table.
6675 int32_t num_entries = switch_instr_->GetNumEntries();
6676 HBasicBlock* block = switch_instr_->GetBlock();
6677 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6678 // The value that we want is the target offset - the position of the table.
6679 for (int32_t i = 0; i < num_entries; i++) {
6680 HBasicBlock* b = successors[i];
6681 Label* l = codegen_->GetLabelOf(b);
6682 DCHECK(l->IsBound());
6683 int32_t offset_to_block = l->Position() - current_table_offset;
6684 assembler->AppendInt32(offset_to_block);
6685 }
6686 }
6687
6688 private:
6689 const HPackedSwitch* switch_instr_;
6690};
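// For example (hypothetical offsets): if the jump table ends up at code offset 0x200 and
// a successor block's label is bound at offset 0x80, the entry appended for that block is
// 0x80 - 0x200 = -0x180; adding it back to the table address at run time (see
// VisitPackedSwitch above) yields the block's address.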
6691
Mark Mendellf55c3e02015-03-26 21:07:46 -04006692void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6693 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006694 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006695 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6696 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006697 assembler->Align(4, 0);
6698 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006699
6700 // Populate any jump tables.
6701 for (auto jump_table : fixups_to_jump_tables_) {
6702 jump_table->CreateJumpTable();
6703 }
6704
6705 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006706 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006707 }
6708
6709 // And finish up.
6710 CodeGenerator::Finalize(allocator);
6711}
6712
Mark Mendellf55c3e02015-03-26 21:07:46 -04006713Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6714 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6715 return Address::RIP(fixup);
6716}
6717
6718Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6719 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6720 return Address::RIP(fixup);
6721}
6722
6723Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6724 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6725 return Address::RIP(fixup);
6726}
6727
6728Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6729 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6730 return Address::RIP(fixup);
6731}
6732
Andreas Gampe85b62f22015-09-09 13:15:38 -07006733// TODO: trg as memory.
6734void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6735 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006736 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006737 return;
6738 }
6739
6740 DCHECK_NE(type, Primitive::kPrimVoid);
6741
6742 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6743 if (trg.Equals(return_loc)) {
6744 return;
6745 }
6746
6747 // Let the parallel move resolver take care of all of this.
6748 HParallelMove parallel_move(GetGraph()->GetArena());
6749 parallel_move.AddMove(return_loc, trg, type, nullptr);
6750 GetMoveResolver()->EmitNativeCode(&parallel_move);
6751}
6752
Mark Mendell9c86b482015-09-18 13:36:07 -04006753Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6754 // Create a fixup to be used to create and address the jump table.
6755 JumpTableRIPFixup* table_fixup =
6756 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6757
6758 // We have to populate the jump tables.
6759 fixups_to_jump_tables_.push_back(table_fixup);
6760 return Address::RIP(table_fixup);
6761}
6762
Mark Mendellea5af682015-10-22 17:35:49 -04006763void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6764 const Address& addr_high,
6765 int64_t v,
6766 HInstruction* instruction) {
6767 if (IsInt<32>(v)) {
6768 int32_t v_32 = v;
6769 __ movq(addr_low, Immediate(v_32));
6770 MaybeRecordImplicitNullCheck(instruction);
6771 } else {
6772 // Didn't fit in a register. Do it in pieces.
6773 int32_t low_v = Low32Bits(v);
6774 int32_t high_v = High32Bits(v);
6775 __ movl(addr_low, Immediate(low_v));
6776 MaybeRecordImplicitNullCheck(instruction);
6777 __ movl(addr_high, Immediate(high_v));
6778 }
6779}
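// Illustrative cases (a sketch): storing 0x1234 is a single movq with a sign-extended
// 32-bit immediate, so the implicit null check is recorded on that one store; storing
// 0x123456789 is split into two movl stores, and the null check is recorded on the first
// (low) store, which is the access that faults first if the base object is null.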
6780
Roland Levillain4d027112015-07-01 15:41:14 +01006781#undef __
6782
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006783} // namespace x86_64
6784} // namespace art