/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
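// Note: RDI is also the first argument register in the x86-64 calling convention,
// so the caller's ArtMethod* arrives in RDI on entry; GenerateFrameEntry() below
// spills it to kCurrentMethodStackOffset.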
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
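// For illustration: at num_entries = 5 the compare/jump sequence costs roughly
// 1.5 * 5 ~= 8 instructions, while a jump table costs 7 instructions plus 5 table
// entries of data, so only larger switches benefit from the table.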

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

static constexpr int kC2ConditionMask = 0x400;
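// Note: 0x400 is the C2 condition-code bit (bit 10) of the x87 FPU status word;
// fprem keeps C2 set while its reduction is incomplete, so code that tests this
// mask after fnstsw can loop until the bit clears.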

#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(Register reg, Primitive::Type type, bool is_div)
      : cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
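// As the class name suggests, the path above handles division/remainder when the
// divisor is -1: x / -1 == -x (hence the neg) and x % -1 == 0 (hence the xor).
// It also covers the INT_MIN / -1 case that would fault in idiv, since negating
// the minimum value wraps back to itself in two's complement, which is exactly
// the result Java requires; xorl suffices even for longs because 32-bit writes
// zero-extend to 64 bits on x86-64.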

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HSuspendCheck* const instruction_;
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
    : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowArrayBounds),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  HBoundsCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
            Immediate(instruction_->GetStringIndex()));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : instruction_(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  HInstruction* const instruction_;
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  HDeoptimize* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : instruction_(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : instruction_(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  HInstruction* const instruction_;
  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : instruction_(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
#define __ down_cast<X86_64Assembler*>(GetAssembler())->

inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
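// (Unsigned-style conditions are used because ucomiss/ucomisd report the result
// through ZF/PF/CF rather than the signed SF/OF flags, so below/above are the
// meaningful tests after a floating-point compare.)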
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
      pc_relative_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                                  invoke->GetDexCacheArrayOffset());
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind the label at the end of the "movl" insn.
      __ Bind(&pc_relative_dex_cache_patches_.back().label);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache]
      uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index;
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
  RecordPcInfo(instruction, dex_pc, slow_path);
}

static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
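// (The fake register is OR-ed into the core callee-save mask in the constructor
// below, presumably so that frame-size bookkeeping accounts for the return-address
// slot already pushed by the call instruction, matching Quick's frame layout.)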
Mark Mendellfb8d2792015-03-31 22:16:59 -0400971CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000972 const X86_64InstructionSetFeatures& isa_features,
973 const CompilerOptions& compiler_options,
974 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +0000975 : CodeGenerator(graph,
976 kNumberOfCpuRegisters,
977 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000978 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +0000979 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
980 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +0000981 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +0000982 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
983 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +0100984 compiler_options,
985 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +0100986 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100987 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +0000988 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -0400989 move_resolver_(graph->GetArena(), this),
Mark Mendellf55c3e02015-03-26 21:07:46 -0400990 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +0000991 constant_area_start_(0),
Vladimir Marko5233f932015-09-29 19:01:15 +0100992 method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
993 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0f7dca42015-11-02 14:36:43 +0000994 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Mark Mendell9c86b482015-09-18 13:36:07 -0400995 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +0000996 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
997}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100998
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100999InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1000 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001001 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001002 assembler_(codegen->GetAssembler()),
1003 codegen_(codegen) {}
1004
David Brazdil58282f42016-01-14 12:45:10 +00001005void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001006 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001007 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001008
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001009 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001010 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001011}
1012
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001013static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001014 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001015}
David Srbecky9d8606d2015-04-12 09:35:32 +01001016
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001017static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001018 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001019}
1020
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001021void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001022 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001023 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001024 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001025 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001026 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001027
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001028 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001029 __ testq(CpuRegister(RAX), Address(
1030 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001031 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001032 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001033
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001034 if (HasEmptyFrame()) {
1035 return;
1036 }
1037
Nicolas Geoffray98893962015-01-21 12:32:32 +00001038 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001039 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001040 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001041 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001042 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1043 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001044 }
1045 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001046
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001047 int adjust = GetFrameSize() - GetCoreSpillSize();
1048 __ subq(CpuRegister(RSP), Immediate(adjust));
1049 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001050 uint32_t xmm_spill_location = GetFpuSpillStart();
1051 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001052
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001053 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1054 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001055 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1056 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1057 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001058 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001059 }
1060
Mathieu Chartiere401d142015-04-22 13:56:20 -07001061 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001062 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001063}
1064
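// Epilogue: undoes the prologue in reverse order (reload the spilled XMM
// callee-saves, pop the frame adjustment, pop the core callee-saves) and
// returns. CFI state is remembered before and restored after, so code emitted
// past the ret still sees the correct frame description.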
1065void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001066 __ cfi().RememberState();
1067 if (!HasEmptyFrame()) {
1068 uint32_t xmm_spill_location = GetFpuSpillStart();
1069 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1070 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1071 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1072 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1073 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1074 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1075 }
1076 }
1077
1078 int adjust = GetFrameSize() - GetCoreSpillSize();
1079 __ addq(CpuRegister(RSP), Immediate(adjust));
1080 __ cfi().AdjustCFAOffset(-adjust);
1081
1082 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1083 Register reg = kCoreCalleeSaves[i];
1084 if (allocated_registers_.ContainsCoreRegister(reg)) {
1085 __ popq(CpuRegister(reg));
1086 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1087 __ cfi().Restore(DWARFReg(reg));
1088 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001089 }
1090 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001091 __ ret();
1092 __ cfi().RestoreState();
1093 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001094}
1095
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001096void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1097 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001098}
1099
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001100Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
1101 switch (load->GetType()) {
1102 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001103 case Primitive::kPrimDouble:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001104 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001105
1106 case Primitive::kPrimInt:
1107 case Primitive::kPrimNot:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001108 case Primitive::kPrimFloat:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001109 return Location::StackSlot(GetStackSlot(load->GetLocal()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001110
1111 case Primitive::kPrimBoolean:
1112 case Primitive::kPrimByte:
1113 case Primitive::kPrimChar:
1114 case Primitive::kPrimShort:
1115 case Primitive::kPrimVoid:
1116 LOG(FATAL) << "Unexpected type " << load->GetType();
Andreas Gampe65b798e2015-04-06 09:35:22 -07001117 UNREACHABLE();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001118 }
1119
1120 LOG(FATAL) << "Unreachable";
Andreas Gampe65b798e2015-04-06 09:35:22 -07001121 UNREACHABLE();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001122}
1123
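// Generic move between any two locations: register, XMM register, stack slot
// or constant. Stack-to-stack copies are routed through the reserved TMP
// register, and 64-bit constants destined for the stack go through
// Store64BitValueToStack.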
1124void CodeGeneratorX86_64::Move(Location destination, Location source) {
1125 if (source.Equals(destination)) {
1126 return;
1127 }
1128 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001129 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001130 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001131 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001132 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001133 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001134 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001135 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1136 } else if (source.IsConstant()) {
1137 HConstant* constant = source.GetConstant();
1138 if (constant->IsLongConstant()) {
1139 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1140 } else {
1141 Load32BitValue(dest, GetInt32ValueOf(constant));
1142 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001143 } else {
1144 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001145 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001146 }
1147 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001148 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001149 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001150 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001151 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001152 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1153 } else if (source.IsConstant()) {
1154 HConstant* constant = source.GetConstant();
1155 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1156 if (constant->IsFloatConstant()) {
1157 Load32BitValue(dest, static_cast<int32_t>(value));
1158 } else {
1159 Load64BitValue(dest, value);
1160 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001161 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001162 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001163 } else {
1164 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001165 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001166 }
1167 } else if (destination.IsStackSlot()) {
1168 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001169 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001170 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001171 } else if (source.IsFpuRegister()) {
1172 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001173 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001174 } else if (source.IsConstant()) {
1175 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001176 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001177 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001178 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001179 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001180 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1181 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001182 }
1183 } else {
1184 DCHECK(destination.IsDoubleStackSlot());
1185 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001186 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001187 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001188 } else if (source.IsFpuRegister()) {
1189 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001190 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001191 } else if (source.IsConstant()) {
1192 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001193 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001194 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001195 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001196 } else {
1197 DCHECK(constant->IsLongConstant());
1198 value = constant->AsLongConstant()->GetValue();
1199 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001200 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001201 } else {
1202 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001203 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1204 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001205 }
1206 }
1207}
1208
Calin Juravle175dc732015-08-25 15:42:32 +01001209void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1210 DCHECK(location.IsRegister());
1211 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1212}
1213
Calin Juravlee460d1d2015-09-29 04:52:17 +01001214void CodeGeneratorX86_64::MoveLocation(
1215 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1216 Move(dst, src);
1217}
1218
1219void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1220 if (location.IsRegister()) {
1221 locations->AddTemp(location);
1222 } else {
1223 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1224 }
1225}
1226
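// Shared by HGoto and HTryBoundary: emits the pending suspend check (for loop
// back edges, or the one preceding the goto in the entry block) and then jumps
// to the successor unless it is the block laid out next.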
David Brazdilfc6a86a2015-06-26 10:33:45 +00001227void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001228 DCHECK(!successor->IsExitBlock());
1229
1230 HBasicBlock* block = got->GetBlock();
1231 HInstruction* previous = got->GetPrevious();
1232
1233 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001234 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001235 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1236 return;
1237 }
1238
1239 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1240 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1241 }
1242 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001243 __ jmp(codegen_->GetLabelOf(successor));
1244 }
1245}
1246
David Brazdilfc6a86a2015-06-26 10:33:45 +00001247void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1248 got->SetLocations(nullptr);
1249}
1250
1251void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1252 HandleGoto(got, got->GetSuccessor());
1253}
1254
1255void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1256 try_boundary->SetLocations(nullptr);
1257}
1258
1259void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1260 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1261 if (!successor->IsExitBlock()) {
1262 HandleGoto(try_boundary, successor);
1263 }
1264}
1265
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001266void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1267 exit->SetLocations(nullptr);
1268}
1269
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001270void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001271}
1272
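// For FP conditions, ucomiss/ucomisd leaves the flags "unordered" when either
// operand is NaN; that case is routed to the true or false target according to
// the condition's NaN bias before the ordered condition is tested.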
Mark Mendell152408f2015-12-31 12:28:50 -05001273template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001274void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001275 LabelType* true_label,
1276 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001277 if (cond->IsFPConditionTrueIfNaN()) {
1278 __ j(kUnordered, true_label);
1279 } else if (cond->IsFPConditionFalseIfNaN()) {
1280 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001281 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001282 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001283}
1284
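// Emits only the flag-setting compare for `condition`: testl/cmpl for 32-bit
// and reference values, cmpq for longs, ucomiss/ucomisd for floats and
// doubles, choosing the immediate, stack-slot or register form that matches
// where the right-hand side currently lives.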
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001285void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001286 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001287
Mark Mendellc4701932015-04-10 13:18:51 -04001288 Location left = locations->InAt(0);
1289 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001290 Primitive::Type type = condition->InputAt(0)->GetType();
1291 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001292 case Primitive::kPrimBoolean:
1293 case Primitive::kPrimByte:
1294 case Primitive::kPrimChar:
1295 case Primitive::kPrimShort:
1296 case Primitive::kPrimInt:
1297 case Primitive::kPrimNot: {
1298 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1299 if (right.IsConstant()) {
1300 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1301 if (value == 0) {
1302 __ testl(left_reg, left_reg);
1303 } else {
1304 __ cmpl(left_reg, Immediate(value));
1305 }
1306 } else if (right.IsStackSlot()) {
1307 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1308 } else {
1309 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1310 }
1311 break;
1312 }
Mark Mendellc4701932015-04-10 13:18:51 -04001313 case Primitive::kPrimLong: {
1314 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1315 if (right.IsConstant()) {
1316 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001317 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001318 } else if (right.IsDoubleStackSlot()) {
1319 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1320 } else {
1321 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1322 }
Mark Mendellc4701932015-04-10 13:18:51 -04001323 break;
1324 }
1325 case Primitive::kPrimFloat: {
1326 if (right.IsFpuRegister()) {
1327 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1328 } else if (right.IsConstant()) {
1329 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1330 codegen_->LiteralFloatAddress(
1331 right.GetConstant()->AsFloatConstant()->GetValue()));
1332 } else {
1333 DCHECK(right.IsStackSlot());
1334 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1335 Address(CpuRegister(RSP), right.GetStackIndex()));
1336 }
Mark Mendellc4701932015-04-10 13:18:51 -04001337 break;
1338 }
1339 case Primitive::kPrimDouble: {
1340 if (right.IsFpuRegister()) {
1341 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1342 } else if (right.IsConstant()) {
1343 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1344 codegen_->LiteralDoubleAddress(
1345 right.GetConstant()->AsDoubleConstant()->GetValue()));
1346 } else {
1347 DCHECK(right.IsDoubleStackSlot());
1348 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1349 Address(CpuRegister(RSP), right.GetStackIndex()));
1350 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001351 break;
1352 }
1353 default:
1354 LOG(FATAL) << "Unexpected condition type " << type;
1355 }
1356}
1357
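// Branching form of the above, used when a long or FP comparison has been
// folded into the branch: set the flags, then emit the jump(s) appropriate for
// the operand type.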
1358template<class LabelType>
1359void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1360 LabelType* true_target_in,
1361 LabelType* false_target_in) {
1362 // Generated branching requires both targets to be explicit. If either of the
 1363  // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1364 LabelType fallthrough_target;
1365 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1366 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1367
1368 // Generate the comparison to set the CC.
1369 GenerateCompareTest(condition);
1370
1371 // Now generate the correct jump(s).
1372 Primitive::Type type = condition->InputAt(0)->GetType();
1373 switch (type) {
1374 case Primitive::kPrimLong: {
1375 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1376 break;
1377 }
1378 case Primitive::kPrimFloat: {
1379 GenerateFPJumps(condition, true_target, false_target);
1380 break;
1381 }
1382 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001383 GenerateFPJumps(condition, true_target, false_target);
1384 break;
1385 }
1386 default:
1387 LOG(FATAL) << "Unexpected condition type " << type;
1388 }
1389
David Brazdil0debae72015-11-12 18:37:00 +00001390 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001391 __ jmp(false_target);
1392 }
David Brazdil0debae72015-11-12 18:37:00 +00001393
1394 if (fallthrough_target.IsLinked()) {
1395 __ Bind(&fallthrough_target);
1396 }
Mark Mendellc4701932015-04-10 13:18:51 -04001397}
1398
David Brazdil0debae72015-11-12 18:37:00 +00001399static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1400 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
 1401  // are set only strictly before `branch`. We can't use the eflags on FP
1402 // conditions if they are materialized due to the complex branching.
1403 return cond->IsCondition() &&
1404 cond->GetNext() == branch &&
1405 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1406}
1407
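// Common branch emission for If, Deoptimize and Select. A null target means
// that side simply falls through. Constant conditions collapse to a single
// jump; boolean inputs and materialized conditions either reuse the live
// EFLAGS or are tested against zero; unmaterialized conditions compare their
// inputs directly, with long/FP cases dispatched to
// GenerateCompareTestAndBranch.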
Mark Mendell152408f2015-12-31 12:28:50 -05001408template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001409void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001410 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001411 LabelType* true_target,
1412 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001413 HInstruction* cond = instruction->InputAt(condition_input_index);
1414
1415 if (true_target == nullptr && false_target == nullptr) {
1416 // Nothing to do. The code always falls through.
1417 return;
1418 } else if (cond->IsIntConstant()) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001419 // Constant condition, statically compared against 1.
David Brazdil0debae72015-11-12 18:37:00 +00001420 if (cond->AsIntConstant()->IsOne()) {
1421 if (true_target != nullptr) {
1422 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001423 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001424 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001425 DCHECK(cond->AsIntConstant()->IsZero());
1426 if (false_target != nullptr) {
1427 __ jmp(false_target);
1428 }
1429 }
1430 return;
1431 }
1432
1433 // The following code generates these patterns:
1434 // (1) true_target == nullptr && false_target != nullptr
1435 // - opposite condition true => branch to false_target
1436 // (2) true_target != nullptr && false_target == nullptr
1437 // - condition true => branch to true_target
1438 // (3) true_target != nullptr && false_target != nullptr
1439 // - condition true => branch to true_target
1440 // - branch to false_target
1441 if (IsBooleanValueOrMaterializedCondition(cond)) {
1442 if (AreEflagsSetFrom(cond, instruction)) {
1443 if (true_target == nullptr) {
1444 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1445 } else {
1446 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1447 }
1448 } else {
1449 // Materialized condition, compare against 0.
1450 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1451 if (lhs.IsRegister()) {
1452 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1453 } else {
1454 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1455 }
1456 if (true_target == nullptr) {
1457 __ j(kEqual, false_target);
1458 } else {
1459 __ j(kNotEqual, true_target);
1460 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001461 }
1462 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001463 // Condition has not been materialized, use its inputs as the
1464 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001465 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001466
David Brazdil0debae72015-11-12 18:37:00 +00001467 // If this is a long or FP comparison that has been folded into
1468 // the HCondition, generate the comparison directly.
1469 Primitive::Type type = condition->InputAt(0)->GetType();
1470 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1471 GenerateCompareTestAndBranch(condition, true_target, false_target);
1472 return;
1473 }
1474
1475 Location lhs = condition->GetLocations()->InAt(0);
1476 Location rhs = condition->GetLocations()->InAt(1);
1477 if (rhs.IsRegister()) {
1478 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1479 } else if (rhs.IsConstant()) {
1480 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001481 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001482 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001483 __ cmpl(lhs.AsRegister<CpuRegister>(),
1484 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1485 }
1486 if (true_target == nullptr) {
1487 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1488 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001489 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001490 }
Dave Allison20dfc792014-06-16 20:44:29 -07001491 }
David Brazdil0debae72015-11-12 18:37:00 +00001492
1493 // If neither branch falls through (case 3), the conditional branch to `true_target`
1494 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1495 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001496 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001497 }
1498}
1499
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001500void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001501 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1502 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001503 locations->SetInAt(0, Location::Any());
1504 }
1505}
1506
1507void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001508 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1509 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1510 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1511 nullptr : codegen_->GetLabelOf(true_successor);
1512 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1513 nullptr : codegen_->GetLabelOf(false_successor);
1514 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001515}
1516
1517void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1518 LocationSummary* locations = new (GetGraph()->GetArena())
1519 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001520 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001521 locations->SetInAt(0, Location::Any());
1522 }
1523}
1524
1525void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001526 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001527 GenerateTestAndBranch<Label>(deoptimize,
1528 /* condition_input_index */ 0,
1529 slow_path->GetEntryLabel(),
1530 /* false_target */ nullptr);
1531}
1532
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001533static bool SelectCanUseCMOV(HSelect* select) {
1534 // There are no conditional move instructions for XMMs.
1535 if (Primitive::IsFloatingPointType(select->GetType())) {
1536 return false;
1537 }
1538
 1539  // An FP condition doesn't generate the single CC that we need.
1540 HInstruction* condition = select->GetCondition();
1541 if (condition->IsCondition() &&
1542 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1543 return false;
1544 }
1545
1546 // We can generate a CMOV for this Select.
1547 return true;
1548}
1549
David Brazdil74eb1b22015-12-14 11:44:01 +00001550void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1551 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1552 if (Primitive::IsFloatingPointType(select->GetType())) {
1553 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001554 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001555 } else {
1556 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001557 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001558 if (select->InputAt(1)->IsConstant()) {
1559 locations->SetInAt(1, Location::RequiresRegister());
1560 } else {
1561 locations->SetInAt(1, Location::Any());
1562 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001563 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001564 locations->SetInAt(1, Location::Any());
1565 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001566 }
1567 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1568 locations->SetInAt(2, Location::RequiresRegister());
1569 }
1570 locations->SetOut(Location::SameAsFirstInput());
1571}
1572
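// Two strategies: when SelectCanUseCMOV holds, the output already contains the
// false value and a 32- or 64-bit cmov conditionally overwrites it with the
// true value; otherwise a test-and-branch skips a plain move of the true value
// when the condition is false.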
1573void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1574 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001575 if (SelectCanUseCMOV(select)) {
1576 // If both the condition and the source types are integer, we can generate
1577 // a CMOV to implement Select.
1578 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001579 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001580 DCHECK(locations->InAt(0).Equals(locations->Out()));
1581
1582 HInstruction* select_condition = select->GetCondition();
1583 Condition cond = kNotEqual;
1584
1585 // Figure out how to test the 'condition'.
1586 if (select_condition->IsCondition()) {
1587 HCondition* condition = select_condition->AsCondition();
1588 if (!condition->IsEmittedAtUseSite()) {
1589 // This was a previously materialized condition.
1590 // Can we use the existing condition code?
1591 if (AreEflagsSetFrom(condition, select)) {
1592 // Materialization was the previous instruction. Condition codes are right.
1593 cond = X86_64IntegerCondition(condition->GetCondition());
1594 } else {
1595 // No, we have to recreate the condition code.
1596 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1597 __ testl(cond_reg, cond_reg);
1598 }
1599 } else {
1600 GenerateCompareTest(condition);
1601 cond = X86_64IntegerCondition(condition->GetCondition());
1602 }
1603 } else {
1604 // Must be a boolean condition, which needs to be compared to 0.
1605 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1606 __ testl(cond_reg, cond_reg);
1607 }
1608
1609 // If the condition is true, overwrite the output, which already contains false.
 1610  // Generate the correctly sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001611 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1612 if (value_true_loc.IsRegister()) {
1613 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1614 } else {
1615 __ cmov(cond,
1616 value_false,
1617 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1618 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001619 } else {
1620 NearLabel false_target;
1621 GenerateTestAndBranch<NearLabel>(select,
1622 /* condition_input_index */ 2,
1623 /* true_target */ nullptr,
1624 &false_target);
1625 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1626 __ Bind(&false_target);
1627 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001628}
1629
David Srbecky0cf44932015-12-09 14:09:59 +00001630void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1631 new (GetGraph()->GetArena()) LocationSummary(info);
1632}
1633
1634void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
David Srbeckyb7070a22016-01-08 18:13:53 +00001635 if (codegen_->HasStackMapAtCurrentPc()) {
1636 // Ensure that we do not collide with the stack map of the previous instruction.
1637 __ nop();
1638 }
David Srbecky0cf44932015-12-09 14:09:59 +00001639 codegen_->RecordPcInfo(info, info->GetDexPc());
1640}
1641
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001642void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
1643 local->SetLocations(nullptr);
1644}
1645
1646void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
1647 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1648}
1649
1650void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
1651 local->SetLocations(nullptr);
1652}
1653
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001654void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001655 // Nothing to do, this is driven by the code generator.
1656}
1657
1658void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001659 LocationSummary* locations =
1660 new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001661 switch (store->InputAt(1)->GetType()) {
1662 case Primitive::kPrimBoolean:
1663 case Primitive::kPrimByte:
1664 case Primitive::kPrimChar:
1665 case Primitive::kPrimShort:
1666 case Primitive::kPrimInt:
1667 case Primitive::kPrimNot:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001668 case Primitive::kPrimFloat:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001669 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1670 break;
1671
1672 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001673 case Primitive::kPrimDouble:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001674 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1675 break;
1676
1677 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001678 LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001679 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001680}
1681
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001682void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001683}
1684
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001685void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001686 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001687 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001688 // Handle the long/FP comparisons made in instruction simplification.
1689 switch (cond->InputAt(0)->GetType()) {
1690 case Primitive::kPrimLong:
1691 locations->SetInAt(0, Location::RequiresRegister());
1692 locations->SetInAt(1, Location::Any());
1693 break;
1694 case Primitive::kPrimFloat:
1695 case Primitive::kPrimDouble:
1696 locations->SetInAt(0, Location::RequiresFpuRegister());
1697 locations->SetInAt(1, Location::Any());
1698 break;
1699 default:
1700 locations->SetInAt(0, Location::RequiresRegister());
1701 locations->SetInAt(1, Location::Any());
1702 break;
1703 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001704 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001705 locations->SetOut(Location::RequiresRegister());
1706 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001707}
1708
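// Materializes a condition into its output register. Integer and long inputs
// clear the register and use cmp + setcc (setcc writes only the low byte);
// float and double inputs compare with ucomiss/ucomisd and the NaN-aware
// jumps are then converted into a 0/1 result below.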
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001709void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001710 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001711 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001712 }
Mark Mendellc4701932015-04-10 13:18:51 -04001713
1714 LocationSummary* locations = cond->GetLocations();
1715 Location lhs = locations->InAt(0);
1716 Location rhs = locations->InAt(1);
1717 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001718 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001719
1720 switch (cond->InputAt(0)->GetType()) {
1721 default:
1722 // Integer case.
1723
1724 // Clear output register: setcc only sets the low byte.
1725 __ xorl(reg, reg);
1726
1727 if (rhs.IsRegister()) {
1728 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1729 } else if (rhs.IsConstant()) {
1730 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001731 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001732 } else {
1733 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1734 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001735 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001736 return;
1737 case Primitive::kPrimLong:
1738 // Clear output register: setcc only sets the low byte.
1739 __ xorl(reg, reg);
1740
1741 if (rhs.IsRegister()) {
1742 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1743 } else if (rhs.IsConstant()) {
1744 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001745 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001746 } else {
1747 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1748 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001749 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001750 return;
1751 case Primitive::kPrimFloat: {
1752 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1753 if (rhs.IsConstant()) {
1754 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1755 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1756 } else if (rhs.IsStackSlot()) {
1757 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1758 } else {
1759 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1760 }
1761 GenerateFPJumps(cond, &true_label, &false_label);
1762 break;
1763 }
1764 case Primitive::kPrimDouble: {
1765 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1766 if (rhs.IsConstant()) {
1767 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1768 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1769 } else if (rhs.IsDoubleStackSlot()) {
1770 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1771 } else {
1772 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1773 }
1774 GenerateFPJumps(cond, &true_label, &false_label);
1775 break;
1776 }
1777 }
1778
1779 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001780 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001781
Roland Levillain4fa13f62015-07-06 18:11:54 +01001782 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001783 __ Bind(&false_label);
1784 __ xorl(reg, reg);
1785 __ jmp(&done_label);
1786
Roland Levillain4fa13f62015-07-06 18:11:54 +01001787 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001788 __ Bind(&true_label);
1789 __ movl(reg, Immediate(1));
1790 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001791}
1792
1793void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001794 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001795}
1796
1797void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001798 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001799}
1800
1801void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001802 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001803}
1804
1805void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001806 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001807}
1808
1809void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001810 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001811}
1812
1813void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001814 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001815}
1816
1817void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001818 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001819}
1820
1821void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001822 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001823}
1824
1825void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001826 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001827}
1828
1829void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001830 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001831}
1832
1833void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001834 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001835}
1836
1837void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001838 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001839}
1840
Aart Bike9f37602015-10-09 11:15:55 -07001841void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001842 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001843}
1844
1845void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001846 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001847}
1848
1849void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001850 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001851}
1852
1853void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001854 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001855}
1856
1857void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001858 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001859}
1860
1861void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001862 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001863}
1864
1865void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001866 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001867}
1868
1869void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001870 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001871}
1872
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001873void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001874 LocationSummary* locations =
1875 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001876 switch (compare->InputAt(0)->GetType()) {
Aart Bika19616e2016-02-01 18:57:58 -08001877 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001878 case Primitive::kPrimLong: {
1879 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001880 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001881 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1882 break;
1883 }
1884 case Primitive::kPrimFloat:
1885 case Primitive::kPrimDouble: {
1886 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001887 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001888 locations->SetOut(Location::RequiresRegister());
1889 break;
1890 }
1891 default:
1892 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1893 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001894}
1895
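// Rough shape of the emitted sequence (the compare is spelled per operand type):
//   cmp   left, right
//   movl  out, 0
//   je    done
//   jl/jb less           ; jb for FP, since ucomis{s,d} signals "less" through CF
//  greater: movl out, 1
//           jmp  done
//  less:    movl out, -1
//  done:
// NaN operands take the unordered jump to `greater` or `less` depending on the
// compare's gt bias.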
1896void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001897 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001898 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001899 Location left = locations->InAt(0);
1900 Location right = locations->InAt(1);
1901
Mark Mendell0c9497d2015-08-21 09:30:05 -04001902 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001903 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001904 Condition less_cond = kLess;
1905
Calin Juravleddb7df22014-11-25 20:56:51 +00001906 switch (type) {
Aart Bika19616e2016-02-01 18:57:58 -08001907 case Primitive::kPrimInt: {
1908 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1909 if (right.IsConstant()) {
1910 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1911 codegen_->Compare32BitValue(left_reg, value);
1912 } else if (right.IsStackSlot()) {
1913 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1914 } else {
1915 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1916 }
1917 break;
1918 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001919 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001920 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1921 if (right.IsConstant()) {
1922 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001923 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001924 } else if (right.IsDoubleStackSlot()) {
1925 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001926 } else {
1927 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1928 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001929 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001930 }
1931 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001932 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1933 if (right.IsConstant()) {
1934 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1935 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1936 } else if (right.IsStackSlot()) {
1937 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1938 } else {
1939 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1940 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001941 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001942 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001943 break;
1944 }
1945 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001946 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1947 if (right.IsConstant()) {
1948 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1949 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1950 } else if (right.IsDoubleStackSlot()) {
1951 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1952 } else {
1953 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1954 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001955 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001956 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001957 break;
1958 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001959 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001960 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001961 }
Aart Bika19616e2016-02-01 18:57:58 -08001962
Calin Juravleddb7df22014-11-25 20:56:51 +00001963 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001964 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001965 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001966
Calin Juravle91debbc2014-11-26 19:01:09 +00001967 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001968 __ movl(out, Immediate(1));
1969 __ jmp(&done);
1970
1971 __ Bind(&less);
1972 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001973
1974 __ Bind(&done);
1975}
1976
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001977void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001978 LocationSummary* locations =
1979 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001980 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001981}
1982
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001983void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001984 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001985}
1986
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001987void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1988 LocationSummary* locations =
1989 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1990 locations->SetOut(Location::ConstantLocation(constant));
1991}
1992
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001993void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001994 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001995}
1996
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001997void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001998 LocationSummary* locations =
1999 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002000 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002001}
2002
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002003void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002004 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002005}
2006
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002007void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2008 LocationSummary* locations =
2009 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2010 locations->SetOut(Location::ConstantLocation(constant));
2011}
2012
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002013void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002014 // Will be generated at use site.
2015}
2016
2017void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2018 LocationSummary* locations =
2019 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2020 locations->SetOut(Location::ConstantLocation(constant));
2021}
2022
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002023void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
2024 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002025 // Will be generated at use site.
2026}
2027
Calin Juravle27df7582015-04-17 19:12:31 +01002028void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2029 memory_barrier->SetLocations(nullptr);
2030}
2031
2032void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002033 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002034}
2035
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002036void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2037 ret->SetLocations(nullptr);
2038}
2039
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002040void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002041 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002042}
2043
2044void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002045 LocationSummary* locations =
2046 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002047 switch (ret->InputAt(0)->GetType()) {
2048 case Primitive::kPrimBoolean:
2049 case Primitive::kPrimByte:
2050 case Primitive::kPrimChar:
2051 case Primitive::kPrimShort:
2052 case Primitive::kPrimInt:
2053 case Primitive::kPrimNot:
2054 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002055 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002056 break;
2057
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002058 case Primitive::kPrimFloat:
2059 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002060 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002061 break;
2062
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002063 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002064 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002065 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002066}
2067
2068void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2069 if (kIsDebugBuild) {
2070 switch (ret->InputAt(0)->GetType()) {
2071 case Primitive::kPrimBoolean:
2072 case Primitive::kPrimByte:
2073 case Primitive::kPrimChar:
2074 case Primitive::kPrimShort:
2075 case Primitive::kPrimInt:
2076 case Primitive::kPrimNot:
2077 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002078 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002079 break;
2080
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002081 case Primitive::kPrimFloat:
2082 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002083 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002084 XMM0);
2085 break;
2086
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002087 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002088 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002089 }
2090 }
2091 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002092}
2093
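// Return values follow the managed ABI used throughout this file: core and
// reference types in RAX, float/double in XMM0, and no location for void.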
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002094Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2095 switch (type) {
2096 case Primitive::kPrimBoolean:
2097 case Primitive::kPrimByte:
2098 case Primitive::kPrimChar:
2099 case Primitive::kPrimShort:
2100 case Primitive::kPrimInt:
2101 case Primitive::kPrimNot:
2102 case Primitive::kPrimLong:
2103 return Location::RegisterLocation(RAX);
2104
2105 case Primitive::kPrimVoid:
2106 return Location::NoLocation();
2107
2108 case Primitive::kPrimDouble:
2109 case Primitive::kPrimFloat:
2110 return Location::FpuRegisterLocation(XMM0);
2111 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002112
2113 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002114}
2115
2116Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2117 return Location::RegisterLocation(kMethodRegisterArgument);
2118}
2119
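// Assigns the location of the next argument: core values take the next GP
// parameter register, FP values the next XMM parameter register, and anything
// past the register count falls back to a stack slot; longs and doubles
// consume two stack indices so later stack arguments keep the right offsets.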
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002120Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002121 switch (type) {
2122 case Primitive::kPrimBoolean:
2123 case Primitive::kPrimByte:
2124 case Primitive::kPrimChar:
2125 case Primitive::kPrimShort:
2126 case Primitive::kPrimInt:
2127 case Primitive::kPrimNot: {
2128 uint32_t index = gp_index_++;
2129 stack_index_++;
2130 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002131 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002132 } else {
2133 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2134 }
2135 }
2136
2137 case Primitive::kPrimLong: {
2138 uint32_t index = gp_index_;
2139 stack_index_ += 2;
2140 if (index < calling_convention.GetNumberOfRegisters()) {
2141 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002142 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002143 } else {
2144 gp_index_ += 2;
2145 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2146 }
2147 }
2148
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002149 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002150 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002151 stack_index_++;
2152 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002153 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002154 } else {
2155 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2156 }
2157 }
2158
2159 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002160 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002161 stack_index_ += 2;
2162 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002163 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002164 } else {
2165 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2166 }
2167 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002168
2169 case Primitive::kPrimVoid:
2170 LOG(FATAL) << "Unexpected parameter type " << type;
2171 break;
2172 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002173 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002174}
2175
Calin Juravle175dc732015-08-25 15:42:32 +01002176void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2177  // The trampoline uses the same calling convention as a regular dex call,
2178  // except that instead of being loaded with the target Method*, arg0/r0 will
2179  // contain the method_idx.
2180 HandleInvoke(invoke);
2181}
2182
2183void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2184 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2185}
2186
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002187void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002188 // Explicit clinit checks triggered by static invokes must have been pruned by
2189 // art::PrepareForRegisterAllocation.
2190 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002191
Mark Mendellfb8d2792015-03-31 22:16:59 -04002192 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002193 if (intrinsic.TryDispatch(invoke)) {
2194 return;
2195 }
2196
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002197 HandleInvoke(invoke);
2198}
2199
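// Emits the intrinsic expansion and returns true if the locations pass marked this
// invoke as intrinsified; otherwise returns false and the caller emits a regular call.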
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002200static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2201 if (invoke->GetLocations()->Intrinsified()) {
2202 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2203 intrinsic.Dispatch(invoke);
2204 return true;
2205 }
2206 return false;
2207}
2208
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002209void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002210 // Explicit clinit checks triggered by static invokes must have been pruned by
2211 // art::PrepareForRegisterAllocation.
2212 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002213
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002214 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2215 return;
2216 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002217
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002218 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002219 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002220 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002221 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002222}
2223
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002224void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002225 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
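  // The shared helper lays out the arguments and the return value using the
  // calling convention visitor above (GetNextLocation / GetReturnLocation).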
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002226 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002227}
2228
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002229void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002230 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002231 if (intrinsic.TryDispatch(invoke)) {
2232 return;
2233 }
2234
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002235 HandleInvoke(invoke);
2236}
2237
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002238void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002239 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2240 return;
2241 }
2242
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002243 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002244 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002245 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002246}
2247
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002248void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2249 HandleInvoke(invoke);
2250 // Add the hidden argument.
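  // It must live in RAX and will carry the dex method index of the interface
  // method (see the Load64BitValue() in the code generator below).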
2251 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2252}
2253
2254void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2255 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002256 LocationSummary* locations = invoke->GetLocations();
2257 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2258 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Mathieu Chartiere401d142015-04-22 13:56:20 -07002259 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
2260 invoke->GetImtIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002261 Location receiver = locations->InAt(0);
2262 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2263
Roland Levillain0d5a2812015-11-13 10:07:31 +00002264  // Set the hidden argument. It is safe to do this here, as RAX
2265  // won't be modified between this point and the `call` instruction.
2266 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002267 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002268
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002269 if (receiver.IsStackSlot()) {
2270 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002271 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002272 __ movl(temp, Address(temp, class_offset));
2273 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002274 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002275 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002276 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002277 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002278 // Instead of simply (possibly) unpoisoning `temp` here, we should
2279 // emit a read barrier for the previous class reference load.
2280 // However this is not required in practice, as this is an
2281 // intermediate/temporary reference and because the current
2282 // concurrent copying collector keeps the from-space memory
2283 // intact/accessible until the end of the marking phase (the
2284  // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002285 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002286 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002287 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002288 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002289 __ call(Address(temp,
2290 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002291
2292 DCHECK(!codegen_->IsLeafMethod());
2293 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2294}
2295
Roland Levillain88cb1752014-10-20 16:36:47 +01002296void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2297 LocationSummary* locations =
2298 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2299 switch (neg->GetResultType()) {
2300 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002301 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002302 locations->SetInAt(0, Location::RequiresRegister());
2303 locations->SetOut(Location::SameAsFirstInput());
2304 break;
2305
Roland Levillain88cb1752014-10-20 16:36:47 +01002306 case Primitive::kPrimFloat:
2307 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002308 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002309 locations->SetOut(Location::SameAsFirstInput());
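      // The temporary holds the sign-bit mask (0x80000000 or 0x8000000000000000)
      // loaded from the constant area by the code generator below.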
Roland Levillain5368c212014-11-27 15:03:41 +00002310 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002311 break;
2312
2313 default:
2314 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2315 }
2316}
2317
2318void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2319 LocationSummary* locations = neg->GetLocations();
2320 Location out = locations->Out();
2321 Location in = locations->InAt(0);
2322 switch (neg->GetResultType()) {
2323 case Primitive::kPrimInt:
2324 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002325 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002326 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002327 break;
2328
2329 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002330 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002331 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002332 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002333 break;
2334
Roland Levillain5368c212014-11-27 15:03:41 +00002335 case Primitive::kPrimFloat: {
2336 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002337 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002338 // Implement float negation with an exclusive or with value
2339 // 0x80000000 (mask for bit 31, representing the sign of a
2340 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002341 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002342 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002343 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002344 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002345
Roland Levillain5368c212014-11-27 15:03:41 +00002346 case Primitive::kPrimDouble: {
2347 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002348 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002349 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002350 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002351 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002352 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002353 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002354 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002355 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002356
2357 default:
2358 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2359 }
2360}
2361
Roland Levillaindff1f282014-11-05 14:15:05 +00002362void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2363 LocationSummary* locations =
2364 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2365 Primitive::Type result_type = conversion->GetResultType();
2366 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002367 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002368
David Brazdilb2bd1c52015-03-25 11:17:37 +00002369 // The Java language does not allow treating boolean as an integral type but
2370 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002371
Roland Levillaindff1f282014-11-05 14:15:05 +00002372 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002373 case Primitive::kPrimByte:
2374 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002375 case Primitive::kPrimBoolean:
2376 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002377 case Primitive::kPrimShort:
2378 case Primitive::kPrimInt:
2379 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002380 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002381 locations->SetInAt(0, Location::Any());
2382 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2383 break;
2384
2385 default:
2386 LOG(FATAL) << "Unexpected type conversion from " << input_type
2387 << " to " << result_type;
2388 }
2389 break;
2390
Roland Levillain01a8d712014-11-14 16:27:39 +00002391 case Primitive::kPrimShort:
2392 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002393 case Primitive::kPrimBoolean:
2394 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002395 case Primitive::kPrimByte:
2396 case Primitive::kPrimInt:
2397 case Primitive::kPrimChar:
2398 // Processing a Dex `int-to-short' instruction.
2399 locations->SetInAt(0, Location::Any());
2400 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2401 break;
2402
2403 default:
2404 LOG(FATAL) << "Unexpected type conversion from " << input_type
2405 << " to " << result_type;
2406 }
2407 break;
2408
Roland Levillain946e1432014-11-11 17:35:19 +00002409 case Primitive::kPrimInt:
2410 switch (input_type) {
2411 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002412 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002413 locations->SetInAt(0, Location::Any());
2414 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2415 break;
2416
2417 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002418 // Processing a Dex `float-to-int' instruction.
2419 locations->SetInAt(0, Location::RequiresFpuRegister());
2420 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002421 break;
2422
Roland Levillain946e1432014-11-11 17:35:19 +00002423 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002424 // Processing a Dex `double-to-int' instruction.
2425 locations->SetInAt(0, Location::RequiresFpuRegister());
2426 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002427 break;
2428
2429 default:
2430 LOG(FATAL) << "Unexpected type conversion from " << input_type
2431 << " to " << result_type;
2432 }
2433 break;
2434
Roland Levillaindff1f282014-11-05 14:15:05 +00002435 case Primitive::kPrimLong:
2436 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002437 case Primitive::kPrimBoolean:
2438 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002439 case Primitive::kPrimByte:
2440 case Primitive::kPrimShort:
2441 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002442 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002443 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002444 // TODO: We would benefit from a (to-be-implemented)
2445 // Location::RegisterOrStackSlot requirement for this input.
2446 locations->SetInAt(0, Location::RequiresRegister());
2447 locations->SetOut(Location::RequiresRegister());
2448 break;
2449
2450 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002451 // Processing a Dex `float-to-long' instruction.
2452 locations->SetInAt(0, Location::RequiresFpuRegister());
2453 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002454 break;
2455
Roland Levillaindff1f282014-11-05 14:15:05 +00002456 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002457 // Processing a Dex `double-to-long' instruction.
2458 locations->SetInAt(0, Location::RequiresFpuRegister());
2459 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002460 break;
2461
2462 default:
2463 LOG(FATAL) << "Unexpected type conversion from " << input_type
2464 << " to " << result_type;
2465 }
2466 break;
2467
Roland Levillain981e4542014-11-14 11:47:14 +00002468 case Primitive::kPrimChar:
2469 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002470 case Primitive::kPrimBoolean:
2471 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002472 case Primitive::kPrimByte:
2473 case Primitive::kPrimShort:
2474 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002475 // Processing a Dex `int-to-char' instruction.
2476 locations->SetInAt(0, Location::Any());
2477 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2478 break;
2479
2480 default:
2481 LOG(FATAL) << "Unexpected type conversion from " << input_type
2482 << " to " << result_type;
2483 }
2484 break;
2485
Roland Levillaindff1f282014-11-05 14:15:05 +00002486 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002487 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002488 case Primitive::kPrimBoolean:
2489 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002490 case Primitive::kPrimByte:
2491 case Primitive::kPrimShort:
2492 case Primitive::kPrimInt:
2493 case Primitive::kPrimChar:
2494 // Processing a Dex `int-to-float' instruction.
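        // Any() is fine here: the code generator below handles register, constant
        // and stack-slot inputs.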
Mark Mendell40741f32015-04-20 22:10:34 -04002495 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002496 locations->SetOut(Location::RequiresFpuRegister());
2497 break;
2498
2499 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002500 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002501 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002502 locations->SetOut(Location::RequiresFpuRegister());
2503 break;
2504
Roland Levillaincff13742014-11-17 14:32:17 +00002505 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002506 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002507 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002508 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002509 break;
2510
2511 default:
2512 LOG(FATAL) << "Unexpected type conversion from " << input_type
2513 << " to " << result_type;
2514 };
2515 break;
2516
Roland Levillaindff1f282014-11-05 14:15:05 +00002517 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002518 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002519 case Primitive::kPrimBoolean:
2520 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002521 case Primitive::kPrimByte:
2522 case Primitive::kPrimShort:
2523 case Primitive::kPrimInt:
2524 case Primitive::kPrimChar:
2525 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002526 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002527 locations->SetOut(Location::RequiresFpuRegister());
2528 break;
2529
2530 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002531 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002532 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002533 locations->SetOut(Location::RequiresFpuRegister());
2534 break;
2535
Roland Levillaincff13742014-11-17 14:32:17 +00002536 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002537 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002538 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002539 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002540 break;
2541
2542 default:
2543 LOG(FATAL) << "Unexpected type conversion from " << input_type
2544 << " to " << result_type;
2545 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002546 break;
2547
2548 default:
2549 LOG(FATAL) << "Unexpected type conversion from " << input_type
2550 << " to " << result_type;
2551 }
2552}
2553
2554void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2555 LocationSummary* locations = conversion->GetLocations();
2556 Location out = locations->Out();
2557 Location in = locations->InAt(0);
2558 Primitive::Type result_type = conversion->GetResultType();
2559 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002560 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002561 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002562 case Primitive::kPrimByte:
2563 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002564 case Primitive::kPrimBoolean:
2565 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002566 case Primitive::kPrimShort:
2567 case Primitive::kPrimInt:
2568 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002569 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002570 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002571 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain51d3fc42014-11-13 14:11:42 +00002572 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002573 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002574 Address(CpuRegister(RSP), in.GetStackIndex()));
2575 } else {
2576 DCHECK(in.GetConstant()->IsIntConstant());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002577 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002578 Immediate(static_cast<int8_t>(in.GetConstant()->AsIntConstant()->GetValue())));
2579 }
2580 break;
2581
2582 default:
2583 LOG(FATAL) << "Unexpected type conversion from " << input_type
2584 << " to " << result_type;
2585 }
2586 break;
2587
Roland Levillain01a8d712014-11-14 16:27:39 +00002588 case Primitive::kPrimShort:
2589 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002590 case Primitive::kPrimBoolean:
2591 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002592 case Primitive::kPrimByte:
2593 case Primitive::kPrimInt:
2594 case Primitive::kPrimChar:
2595 // Processing a Dex `int-to-short' instruction.
2596 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002597 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain01a8d712014-11-14 16:27:39 +00002598 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002599 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002600 Address(CpuRegister(RSP), in.GetStackIndex()));
2601 } else {
2602 DCHECK(in.GetConstant()->IsIntConstant());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002603 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002604 Immediate(static_cast<int16_t>(in.GetConstant()->AsIntConstant()->GetValue())));
2605 }
2606 break;
2607
2608 default:
2609 LOG(FATAL) << "Unexpected type conversion from " << input_type
2610 << " to " << result_type;
2611 }
2612 break;
2613
Roland Levillain946e1432014-11-11 17:35:19 +00002614 case Primitive::kPrimInt:
2615 switch (input_type) {
2616 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002617 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002618 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002619 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002620 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002621 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002622 Address(CpuRegister(RSP), in.GetStackIndex()));
2623 } else {
2624 DCHECK(in.IsConstant());
2625 DCHECK(in.GetConstant()->IsLongConstant());
2626 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002627 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002628 }
2629 break;
2630
Roland Levillain3f8f9362014-12-02 17:45:01 +00002631 case Primitive::kPrimFloat: {
2632 // Processing a Dex `float-to-int' instruction.
2633 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2634 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002635 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002636
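        // Dex/Java semantics: NaN converts to 0, values too large saturate to
        // Integer.MAX_VALUE and values too small saturate to Integer.MIN_VALUE.
        // cvttss2si already yields 0x80000000 (== Integer.MIN_VALUE) for NaN and
        // out-of-range inputs, so only the MAX_VALUE and NaN cases need fixing up.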
2637 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002638 // if input >= (float)INT_MAX goto done
2639 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002640 __ j(kAboveEqual, &done);
2641 // if input == NaN goto nan
2642 __ j(kUnordered, &nan);
2643 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002644 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002645 __ jmp(&done);
2646 __ Bind(&nan);
2647 // output = 0
2648 __ xorl(output, output);
2649 __ Bind(&done);
2650 break;
2651 }
2652
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002653 case Primitive::kPrimDouble: {
2654 // Processing a Dex `double-to-int' instruction.
2655 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2656 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002657 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002658
2659 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002660 // if input >= (double)INT_MAX goto done
2661 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002662 __ j(kAboveEqual, &done);
2663 // if input == NaN goto nan
2664 __ j(kUnordered, &nan);
2665 // output = double-to-int-truncate(input)
2666 __ cvttsd2si(output, input);
2667 __ jmp(&done);
2668 __ Bind(&nan);
2669 // output = 0
2670 __ xorl(output, output);
2671 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002672 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002673 }
Roland Levillain946e1432014-11-11 17:35:19 +00002674
2675 default:
2676 LOG(FATAL) << "Unexpected type conversion from " << input_type
2677 << " to " << result_type;
2678 }
2679 break;
2680
Roland Levillaindff1f282014-11-05 14:15:05 +00002681 case Primitive::kPrimLong:
2682 switch (input_type) {
2683 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002684 case Primitive::kPrimBoolean:
2685 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002686 case Primitive::kPrimByte:
2687 case Primitive::kPrimShort:
2688 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002689 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002690 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002691 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002692 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002693 break;
2694
Roland Levillain624279f2014-12-04 11:54:28 +00002695 case Primitive::kPrimFloat: {
2696 // Processing a Dex `float-to-long' instruction.
2697 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2698 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002699 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002700
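        // Same saturation scheme as float-to-int above, but against Long.MAX_VALUE:
        // cvttss2si with is64bit=true already yields 0x8000000000000000
        // (== Long.MIN_VALUE) for NaN and out-of-range inputs.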
Mark Mendell92e83bf2015-05-07 11:25:03 -04002701 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002702 // if input >= (float)LONG_MAX goto done
2703 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002704 __ j(kAboveEqual, &done);
2705 // if input == NaN goto nan
2706 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002707 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002708 __ cvttss2si(output, input, true);
2709 __ jmp(&done);
2710 __ Bind(&nan);
2711 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002712 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002713 __ Bind(&done);
2714 break;
2715 }
2716
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002717 case Primitive::kPrimDouble: {
2718 // Processing a Dex `double-to-long' instruction.
2719 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2720 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002721 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002722
Mark Mendell92e83bf2015-05-07 11:25:03 -04002723 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002724 // if input >= (double)LONG_MAX goto done
2725 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002726 __ j(kAboveEqual, &done);
2727 // if input == NaN goto nan
2728 __ j(kUnordered, &nan);
2729 // output = double-to-long-truncate(input)
2730 __ cvttsd2si(output, input, true);
2731 __ jmp(&done);
2732 __ Bind(&nan);
2733 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002734 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002735 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002736 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002737 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002738
2739 default:
2740 LOG(FATAL) << "Unexpected type conversion from " << input_type
2741 << " to " << result_type;
2742 }
2743 break;
2744
Roland Levillain981e4542014-11-14 11:47:14 +00002745 case Primitive::kPrimChar:
2746 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002747 case Primitive::kPrimBoolean:
2748 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002749 case Primitive::kPrimByte:
2750 case Primitive::kPrimShort:
2751 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002752 // Processing a Dex `int-to-char' instruction.
2753 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002754 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain981e4542014-11-14 11:47:14 +00002755 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002756 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002757 Address(CpuRegister(RSP), in.GetStackIndex()));
2758 } else {
2759 DCHECK(in.GetConstant()->IsIntConstant());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002760 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002761 Immediate(static_cast<uint16_t>(
2762 in.GetConstant()->AsIntConstant()->GetValue())));
Roland Levillain981e4542014-11-14 11:47:14 +00002763 }
2764 break;
2765
2766 default:
2767 LOG(FATAL) << "Unexpected type conversion from " << input_type
2768 << " to " << result_type;
2769 }
2770 break;
2771
Roland Levillaindff1f282014-11-05 14:15:05 +00002772 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002773 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002774 case Primitive::kPrimBoolean:
2775 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002776 case Primitive::kPrimByte:
2777 case Primitive::kPrimShort:
2778 case Primitive::kPrimInt:
2779 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002780 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002781 if (in.IsRegister()) {
2782 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2783 } else if (in.IsConstant()) {
2784 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2785 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002786 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002787 } else {
2788 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2789 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2790 }
Roland Levillaincff13742014-11-17 14:32:17 +00002791 break;
2792
2793 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002794 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002795 if (in.IsRegister()) {
2796 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2797 } else if (in.IsConstant()) {
2798 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2799 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002800            codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002801 } else {
2802 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2803 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2804 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002805 break;
2806
Roland Levillaincff13742014-11-17 14:32:17 +00002807 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002808 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002809 if (in.IsFpuRegister()) {
2810 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2811 } else if (in.IsConstant()) {
2812 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2813 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002814 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002815 } else {
2816 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2817 Address(CpuRegister(RSP), in.GetStackIndex()));
2818 }
Roland Levillaincff13742014-11-17 14:32:17 +00002819 break;
2820
2821 default:
2822 LOG(FATAL) << "Unexpected type conversion from " << input_type
2823 << " to " << result_type;
2824 };
2825 break;
2826
Roland Levillaindff1f282014-11-05 14:15:05 +00002827 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002828 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002829 case Primitive::kPrimBoolean:
2830 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002831 case Primitive::kPrimByte:
2832 case Primitive::kPrimShort:
2833 case Primitive::kPrimInt:
2834 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002835 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002836 if (in.IsRegister()) {
2837 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2838 } else if (in.IsConstant()) {
2839 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2840 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002841 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002842 } else {
2843 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2844 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2845 }
Roland Levillaincff13742014-11-17 14:32:17 +00002846 break;
2847
2848 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002849 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002850 if (in.IsRegister()) {
2851 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2852 } else if (in.IsConstant()) {
2853 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2854 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002855 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002856 } else {
2857 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2858 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2859 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002860 break;
2861
Roland Levillaincff13742014-11-17 14:32:17 +00002862 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002863 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002864 if (in.IsFpuRegister()) {
2865 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2866 } else if (in.IsConstant()) {
2867 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2868 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002869 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002870 } else {
2871 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2872 Address(CpuRegister(RSP), in.GetStackIndex()));
2873 }
Roland Levillaincff13742014-11-17 14:32:17 +00002874 break;
2875
2876 default:
2877 LOG(FATAL) << "Unexpected type conversion from " << input_type
2878 << " to " << result_type;
2879 };
Roland Levillaindff1f282014-11-05 14:15:05 +00002880 break;
2881
2882 default:
2883 LOG(FATAL) << "Unexpected type conversion from " << input_type
2884 << " to " << result_type;
2885 }
2886}
2887
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002888void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002889 LocationSummary* locations =
2890 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002891 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002892 case Primitive::kPrimInt: {
2893 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002894 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2895 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002896 break;
2897 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002898
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002899 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002900 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002901 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002902 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002903 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002904 break;
2905 }
2906
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002907 case Primitive::kPrimDouble:
2908 case Primitive::kPrimFloat: {
2909 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002910 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002911 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002912 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002913 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002914
2915 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002916 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002917 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002918}
2919
2920void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2921 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002922 Location first = locations->InAt(0);
2923 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002924 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002925
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002926 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002927 case Primitive::kPrimInt: {
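      // When the output register aliases one of the inputs a plain addl suffices;
      // otherwise leal performs a non-destructive three-operand add
      // (out = first + second) without clobbering either input.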
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002928 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002929 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2930 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002931 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2932 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002933 } else {
2934 __ leal(out.AsRegister<CpuRegister>(), Address(
2935 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2936 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002937 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002938 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2939 __ addl(out.AsRegister<CpuRegister>(),
2940 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2941 } else {
2942 __ leal(out.AsRegister<CpuRegister>(), Address(
2943 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2944 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002945 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002946 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002947 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002948 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002949 break;
2950 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002951
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002952 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002953 if (second.IsRegister()) {
2954 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2955 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002956 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2957 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002958 } else {
2959 __ leaq(out.AsRegister<CpuRegister>(), Address(
2960 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2961 }
2962 } else {
2963 DCHECK(second.IsConstant());
2964 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2965 int32_t int32_value = Low32Bits(value);
2966 DCHECK_EQ(int32_value, value);
2967 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2968 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2969 } else {
2970 __ leaq(out.AsRegister<CpuRegister>(), Address(
2971 first.AsRegister<CpuRegister>(), int32_value));
2972 }
2973 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002974 break;
2975 }
2976
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002977 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002978 if (second.IsFpuRegister()) {
2979 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2980 } else if (second.IsConstant()) {
2981 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002982 codegen_->LiteralFloatAddress(
2983 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002984 } else {
2985 DCHECK(second.IsStackSlot());
2986 __ addss(first.AsFpuRegister<XmmRegister>(),
2987 Address(CpuRegister(RSP), second.GetStackIndex()));
2988 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002989 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002990 }
2991
2992 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002993 if (second.IsFpuRegister()) {
2994 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2995 } else if (second.IsConstant()) {
2996 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002997 codegen_->LiteralDoubleAddress(
2998 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002999 } else {
3000 DCHECK(second.IsDoubleStackSlot());
3001 __ addsd(first.AsFpuRegister<XmmRegister>(),
3002 Address(CpuRegister(RSP), second.GetStackIndex()));
3003 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003004 break;
3005 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003006
3007 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003008 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003009 }
3010}
3011
3012void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003013 LocationSummary* locations =
3014 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003015 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003016 case Primitive::kPrimInt: {
3017 locations->SetInAt(0, Location::RequiresRegister());
3018 locations->SetInAt(1, Location::Any());
3019 locations->SetOut(Location::SameAsFirstInput());
3020 break;
3021 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003022 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003023 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003024 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003025 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003026 break;
3027 }
Calin Juravle11351682014-10-23 15:38:15 +01003028 case Primitive::kPrimFloat:
3029 case Primitive::kPrimDouble: {
3030 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003031 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003032 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003033 break;
Calin Juravle11351682014-10-23 15:38:15 +01003034 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003035 default:
Calin Juravle11351682014-10-23 15:38:15 +01003036 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003037 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003038}
3039
3040void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3041 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003042 Location first = locations->InAt(0);
3043 Location second = locations->InAt(1);
3044 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003045 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003046 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003047 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003048 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003049 } else if (second.IsConstant()) {
3050 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003051 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003052 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003053 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003054 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003055 break;
3056 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003057 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003058 if (second.IsConstant()) {
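        // The locations builder above only allows constants that fit in an int32
        // (RegisterOrInt32Constant), hence the IsInt<32> check below.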
3059 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3060 DCHECK(IsInt<32>(value));
3061 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3062 } else {
3063 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3064 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003065 break;
3066 }
3067
Calin Juravle11351682014-10-23 15:38:15 +01003068 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003069 if (second.IsFpuRegister()) {
3070 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3071 } else if (second.IsConstant()) {
3072 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003073 codegen_->LiteralFloatAddress(
3074 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003075 } else {
3076 DCHECK(second.IsStackSlot());
3077 __ subss(first.AsFpuRegister<XmmRegister>(),
3078 Address(CpuRegister(RSP), second.GetStackIndex()));
3079 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003080 break;
Calin Juravle11351682014-10-23 15:38:15 +01003081 }
3082
3083 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003084 if (second.IsFpuRegister()) {
3085 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3086 } else if (second.IsConstant()) {
3087 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003088 codegen_->LiteralDoubleAddress(
3089 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003090 } else {
3091 DCHECK(second.IsDoubleStackSlot());
3092 __ subsd(first.AsFpuRegister<XmmRegister>(),
3093 Address(CpuRegister(RSP), second.GetStackIndex()));
3094 }
Calin Juravle11351682014-10-23 15:38:15 +01003095 break;
3096 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003097
3098 default:
Calin Juravle11351682014-10-23 15:38:15 +01003099 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003100 }
3101}
3102
Calin Juravle34bacdf2014-10-07 20:23:36 +01003103void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3104 LocationSummary* locations =
3105 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3106 switch (mul->GetResultType()) {
3107 case Primitive::kPrimInt: {
3108 locations->SetInAt(0, Location::RequiresRegister());
3109 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003110 if (mul->InputAt(1)->IsIntConstant()) {
3111 // Can use 3 operand multiply.
3112 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3113 } else {
3114 locations->SetOut(Location::SameAsFirstInput());
3115 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003116 break;
3117 }
3118 case Primitive::kPrimLong: {
3119 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003120 locations->SetInAt(1, Location::Any());
3121 if (mul->InputAt(1)->IsLongConstant() &&
3122 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003123 // Can use 3 operand multiply.
3124 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3125 } else {
3126 locations->SetOut(Location::SameAsFirstInput());
3127 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003128 break;
3129 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003130 case Primitive::kPrimFloat:
3131 case Primitive::kPrimDouble: {
3132 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003133 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003134 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003135 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003136 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003137
3138 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003139 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003140 }
3141}
3142
3143void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3144 LocationSummary* locations = mul->GetLocations();
3145 Location first = locations->InAt(0);
3146 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003147 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003148 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003149 case Primitive::kPrimInt:
3150 // The constant may have ended up in a register, so test explicitly to avoid
3151 // problems where the output may not be the same as the first operand.
3152 if (mul->InputAt(1)->IsIntConstant()) {
3153 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3154 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3155 } else if (second.IsRegister()) {
3156 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003157 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003158 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003159 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003160 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003161 __ imull(first.AsRegister<CpuRegister>(),
3162 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003163 }
3164 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003165 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003166 // The constant may have ended up in a register, so test explicitly to avoid
3167 // problems where the output may not be the same as the first operand.
3168 if (mul->InputAt(1)->IsLongConstant()) {
3169 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3170 if (IsInt<32>(value)) {
3171 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3172 Immediate(static_cast<int32_t>(value)));
3173 } else {
3174 // Have to use the constant area.
3175 DCHECK(first.Equals(out));
3176 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3177 }
3178 } else if (second.IsRegister()) {
3179 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003180 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003181 } else {
3182 DCHECK(second.IsDoubleStackSlot());
3183 DCHECK(first.Equals(out));
3184 __ imulq(first.AsRegister<CpuRegister>(),
3185 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003186 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003187 break;
3188 }
3189
Calin Juravleb5bfa962014-10-21 18:02:24 +01003190 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003191 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003192 if (second.IsFpuRegister()) {
3193 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3194 } else if (second.IsConstant()) {
3195 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003196 codegen_->LiteralFloatAddress(
3197 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003198 } else {
3199 DCHECK(second.IsStackSlot());
3200 __ mulss(first.AsFpuRegister<XmmRegister>(),
3201 Address(CpuRegister(RSP), second.GetStackIndex()));
3202 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003203 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003204 }
3205
3206 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003207 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003208 if (second.IsFpuRegister()) {
3209 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3210 } else if (second.IsConstant()) {
3211 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003212 codegen_->LiteralDoubleAddress(
3213 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003214 } else {
3215 DCHECK(second.IsDoubleStackSlot());
3216 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3217 Address(CpuRegister(RSP), second.GetStackIndex()));
3218 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003219 break;
3220 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003221
3222 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003223 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003224 }
3225}
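// Illustrative note (a sketch, not generated code; the example values are chosen
// only for clarity): with a constant operand the three-operand multiply used above
// can write straight to the output register, e.g.
//   imull(out, in, Immediate(10));   // out = in * 10, 'out' need not equal 'in'
// which is why those cases request kNoOutputOverlap. The two-operand forms
//   imull(first, second);            // first = first * second
//   imull(first, Address(CpuRegister(RSP), offset));
// destroy their first operand, so the remaining cases keep the output in the same
// register as input 0 (SameAsFirstInput).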
3226
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003227void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3228 uint32_t stack_adjustment, bool is_float) {
3229 if (source.IsStackSlot()) {
3230 DCHECK(is_float);
3231 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3232 } else if (source.IsDoubleStackSlot()) {
3233 DCHECK(!is_float);
3234 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3235 } else {
3236 // Write the value to the temporary location on the stack, then load it onto the FP stack.

3237 if (is_float) {
3238 Location stack_temp = Location::StackSlot(temp_offset);
3239 codegen_->Move(stack_temp, source);
3240 __ flds(Address(CpuRegister(RSP), temp_offset));
3241 } else {
3242 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3243 codegen_->Move(stack_temp, source);
3244 __ fldl(Address(CpuRegister(RSP), temp_offset));
3245 }
3246 }
3247}
3248
3249void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3250 Primitive::Type type = rem->GetResultType();
3251 bool is_float = type == Primitive::kPrimFloat;
3252 size_t elem_size = Primitive::ComponentSize(type);
3253 LocationSummary* locations = rem->GetLocations();
3254 Location first = locations->InAt(0);
3255 Location second = locations->InAt(1);
3256 Location out = locations->Out();
3257
3258 // Create stack space for 2 elements.
3259 // TODO: enhance register allocator to ask for stack temporaries.
3260 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3261
3262 // Load the values to the FP stack in reverse order, using temporaries if needed.
3263 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3264 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3265
3266 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003267 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003268 __ Bind(&retry);
3269 __ fprem();
3270
3271 // Move FP status to AX.
3272 __ fstsw();
3273
3274 // And see if the argument reduction is complete. This is signaled by the
3275 // C2 FPU flag bit set to 0.
3276 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3277 __ j(kNotEqual, &retry);
3278
3279 // We have settled on the final value. Retrieve it into an XMM register.
3280 // Store FP top of stack to real stack.
3281 if (is_float) {
3282 __ fsts(Address(CpuRegister(RSP), 0));
3283 } else {
3284 __ fstl(Address(CpuRegister(RSP), 0));
3285 }
3286
3287 // Pop the 2 items from the FP stack.
3288 __ fucompp();
3289
3290 // Load the value from the stack into an XMM register.
3291 DCHECK(out.IsFpuRegister()) << out;
3292 if (is_float) {
3293 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3294 } else {
3295 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3296 }
3297
3298 // And remove the temporary stack space we allocated.
3299 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3300}
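// Illustrative note (a sketch of the x87 behaviour relied on above, not generated
// code): fprem computes a partial remainder using a truncated quotient, so the
// retry loop implements fmod-style semantics as required for Java float/double %.
// For example, assuming inputs 5.5f % 2.0f, the reduction completes in a single
// iteration (C2 clear) with 1.5f left on the FP stack, which is then stored to the
// temporary stack slot and reloaded into the XMM output register.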
3301
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003302void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3303 DCHECK(instruction->IsDiv() || instruction->IsRem());
3304
3305 LocationSummary* locations = instruction->GetLocations();
3306 Location second = locations->InAt(1);
3307 DCHECK(second.IsConstant());
3308
3309 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3310 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003311 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003312
3313 DCHECK(imm == 1 || imm == -1);
3314
3315 switch (instruction->GetResultType()) {
3316 case Primitive::kPrimInt: {
3317 if (instruction->IsRem()) {
3318 __ xorl(output_register, output_register);
3319 } else {
3320 __ movl(output_register, input_register);
3321 if (imm == -1) {
3322 __ negl(output_register);
3323 }
3324 }
3325 break;
3326 }
3327
3328 case Primitive::kPrimLong: {
3329 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003330 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003331 } else {
3332 __ movq(output_register, input_register);
3333 if (imm == -1) {
3334 __ negq(output_register);
3335 }
3336 }
3337 break;
3338 }
3339
3340 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003341 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003342 }
3343}
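// Illustrative note (not generated code; values chosen only for the example): for a
// divisor of 1 or -1 no division instruction is needed at all. With numerator n:
//   n / 1 == n,   n / -1 == -n   (the mov/neg sequence above),
//   n % 1 == 0,   n % -1 == 0    (the xor-to-zero above).
// This also avoids the INT_MIN / -1 overflow case, since negl/negq simply wrap to
// INT_MIN/LONG_MIN, which is exactly what the Java semantics require.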
3344
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003345void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003346 LocationSummary* locations = instruction->GetLocations();
3347 Location second = locations->InAt(1);
3348
3349 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3350 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3351
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003352 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003353 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3354 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003355
3356 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3357
3358 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003359 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003360 __ testl(numerator, numerator);
3361 __ cmov(kGreaterEqual, tmp, numerator);
3362 int shift = CTZ(imm);
3363 __ sarl(tmp, Immediate(shift));
3364
3365 if (imm < 0) {
3366 __ negl(tmp);
3367 }
3368
3369 __ movl(output_register, tmp);
3370 } else {
3371 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3372 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3373
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003374 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003375 __ addq(rdx, numerator);
3376 __ testq(numerator, numerator);
3377 __ cmov(kGreaterEqual, rdx, numerator);
3378 int shift = CTZ(imm);
3379 __ sarq(rdx, Immediate(shift));
3380
3381 if (imm < 0) {
3382 __ negq(rdx);
3383 }
3384
3385 __ movq(output_register, rdx);
3386 }
3387}
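// Illustrative note (a sketch with example values, not generated code): the
// lea/test/cmov/sar sequence above implements round-towards-zero division by a
// power of two. Assuming imm == 8 (abs_imm == 8, shift == 3) and numerator n:
//   tmp = n + 7;            // bias, only relevant when n is negative
//   if (n >= 0) tmp = n;    // the cmov drops the bias for non-negative n
//   tmp >>= 3;              // arithmetic shift
// E.g. n == -20: (-20 + 7) >> 3 == -13 >> 3 == -2, matching -20 / 8 in Java,
// whereas a plain arithmetic shift alone would give -3 (rounding towards negative
// infinity). For a negative divisor the result is negated afterwards.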
3388
3389void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3390 DCHECK(instruction->IsDiv() || instruction->IsRem());
3391
3392 LocationSummary* locations = instruction->GetLocations();
3393 Location second = locations->InAt(1);
3394
3395 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3396 : locations->GetTemp(0).AsRegister<CpuRegister>();
3397 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3398 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3399 : locations->Out().AsRegister<CpuRegister>();
3400 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3401
3402 DCHECK_EQ(RAX, eax.AsRegister());
3403 DCHECK_EQ(RDX, edx.AsRegister());
3404 if (instruction->IsDiv()) {
3405 DCHECK_EQ(RAX, out.AsRegister());
3406 } else {
3407 DCHECK_EQ(RDX, out.AsRegister());
3408 }
3409
3410 int64_t magic;
3411 int shift;
3412
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003413 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003414 if (instruction->GetResultType() == Primitive::kPrimInt) {
3415 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3416
3417 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3418
3419 __ movl(numerator, eax);
3420
Mark Mendell0c9497d2015-08-21 09:30:05 -04003421 NearLabel no_div;
3422 NearLabel end;
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003423 __ testl(eax, eax);
3424 __ j(kNotEqual, &no_div);
3425
3426 __ xorl(out, out);
3427 __ jmp(&end);
3428
3429 __ Bind(&no_div);
3430
3431 __ movl(eax, Immediate(magic));
3432 __ imull(numerator);
3433
3434 if (imm > 0 && magic < 0) {
3435 __ addl(edx, numerator);
3436 } else if (imm < 0 && magic > 0) {
3437 __ subl(edx, numerator);
3438 }
3439
3440 if (shift != 0) {
3441 __ sarl(edx, Immediate(shift));
3442 }
3443
3444 __ movl(eax, edx);
3445 __ shrl(edx, Immediate(31));
3446 __ addl(edx, eax);
3447
3448 if (instruction->IsRem()) {
3449 __ movl(eax, numerator);
3450 __ imull(edx, Immediate(imm));
3451 __ subl(eax, edx);
3452 __ movl(edx, eax);
3453 } else {
3454 __ movl(eax, edx);
3455 }
3456 __ Bind(&end);
3457 } else {
3458 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3459
3460 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3461
3462 CpuRegister rax = eax;
3463 CpuRegister rdx = edx;
3464
3465 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3466
3467 // Save the numerator.
3468 __ movq(numerator, rax);
3469
3470 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003471 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003472
3473 // RDX:RAX = magic * numerator
3474 __ imulq(numerator);
3475
3476 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003477 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003478 __ addq(rdx, numerator);
3479 } else if (imm < 0 && magic > 0) {
3480 // RDX -= numerator
3481 __ subq(rdx, numerator);
3482 }
3483
3484 // Shift if needed.
3485 if (shift != 0) {
3486 __ sarq(rdx, Immediate(shift));
3487 }
3488
3489 // RDX += 1 if RDX < 0
3490 __ movq(rax, rdx);
3491 __ shrq(rdx, Immediate(63));
3492 __ addq(rdx, rax);
3493
3494 if (instruction->IsRem()) {
3495 __ movq(rax, numerator);
3496
3497 if (IsInt<32>(imm)) {
3498 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3499 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003500 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003501 }
3502
3503 __ subq(rax, rdx);
3504 __ movq(rdx, rax);
3505 } else {
3506 __ movq(rax, rdx);
3507 }
3508 }
3509}
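// Illustrative note (a sketch with example values, not generated code): the code
// above is the classic "magic number" division, with the constants supplied by
// CalculateMagicAndShiftForDivRem. Assuming a divisor of 7, the 32-bit magic
// constant is 0x92492493 with shift == 2; the magic value is negative while the
// divisor is positive, so the numerator is added back after the widening multiply.
// For numerator 100:
//   high32(100 * (int32_t)0x92492493) == -43
//   -43 + 100 == 57;   57 >> 2 == 14;   14 + sign_bit(14) == 14
// which matches 100 / 7. For a remainder, the quotient is then multiplied by the
// divisor and subtracted from the saved numerator.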
3510
Calin Juravlebacfec32014-11-14 15:54:36 +00003511void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3512 DCHECK(instruction->IsDiv() || instruction->IsRem());
3513 Primitive::Type type = instruction->GetResultType();
3514 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3515
3516 bool is_div = instruction->IsDiv();
3517 LocationSummary* locations = instruction->GetLocations();
3518
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003519 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3520 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003521
Roland Levillain271ab9c2014-11-27 15:23:57 +00003522 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003523 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003524
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003525 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003526 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003527
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003528 if (imm == 0) {
3529 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3530 } else if (imm == 1 || imm == -1) {
3531 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003532 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003533 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003534 } else {
3535 DCHECK(imm <= -2 || imm >= 2);
3536 GenerateDivRemWithAnyConstant(instruction);
3537 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003538 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003539 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003540 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
3541 out.AsRegister(), type, is_div);
3542 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003543
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003544 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3545 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3546 // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000)
3547 // so it's safe to just use negl instead of more complex comparisons.
3548 if (type == Primitive::kPrimInt) {
3549 __ cmpl(second_reg, Immediate(-1));
3550 __ j(kEqual, slow_path->GetEntryLabel());
3551 // edx:eax <- sign-extended of eax
3552 __ cdq();
3553 // eax = quotient, edx = remainder
3554 __ idivl(second_reg);
3555 } else {
3556 __ cmpq(second_reg, Immediate(-1));
3557 __ j(kEqual, slow_path->GetEntryLabel());
3558 // rdx:rax <- sign extension of rax
3559 __ cqo();
3560 // rax = quotient, rdx = remainder
3561 __ idivq(second_reg);
3562 }
3563 __ Bind(slow_path->GetExitLabel());
3564 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003565}
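// Illustrative note (a sketch, not generated code): for a non-constant divisor the
// int case above emits essentially
//   cmpl(second, Immediate(-1)); j(kEqual, slow_path);  // idivl faults on INT_MIN / -1
//   cdq();                                              // edx:eax = sign extension of eax
//   idivl(second);                                      // eax = quotient, edx = remainder
// The slow path exists because, e.g., 0x80000000 / -1 raises a divide error (#DE)
// in hardware, while Java defines the quotient as 0x80000000 and the remainder as 0.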
3566
Calin Juravle7c4954d2014-10-28 16:57:40 +00003567void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3568 LocationSummary* locations =
3569 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3570 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003571 case Primitive::kPrimInt:
3572 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003573 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003574 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003575 locations->SetOut(Location::SameAsFirstInput());
3576 // Intel uses edx:eax (rdx:rax for long values) as the dividend.
3577 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003578 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3579 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3580 // output and request another temp.
3581 if (div->InputAt(1)->IsConstant()) {
3582 locations->AddTemp(Location::RequiresRegister());
3583 }
Calin Juravled0d48522014-11-04 16:40:20 +00003584 break;
3585 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003586
Calin Juravle7c4954d2014-10-28 16:57:40 +00003587 case Primitive::kPrimFloat:
3588 case Primitive::kPrimDouble: {
3589 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003590 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003591 locations->SetOut(Location::SameAsFirstInput());
3592 break;
3593 }
3594
3595 default:
3596 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3597 }
3598}
3599
3600void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3601 LocationSummary* locations = div->GetLocations();
3602 Location first = locations->InAt(0);
3603 Location second = locations->InAt(1);
3604 DCHECK(first.Equals(locations->Out()));
3605
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003606 Primitive::Type type = div->GetResultType();
3607 switch (type) {
3608 case Primitive::kPrimInt:
3609 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003610 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003611 break;
3612 }
3613
Calin Juravle7c4954d2014-10-28 16:57:40 +00003614 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003615 if (second.IsFpuRegister()) {
3616 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3617 } else if (second.IsConstant()) {
3618 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003619 codegen_->LiteralFloatAddress(
3620 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003621 } else {
3622 DCHECK(second.IsStackSlot());
3623 __ divss(first.AsFpuRegister<XmmRegister>(),
3624 Address(CpuRegister(RSP), second.GetStackIndex()));
3625 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003626 break;
3627 }
3628
3629 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003630 if (second.IsFpuRegister()) {
3631 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3632 } else if (second.IsConstant()) {
3633 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003634 codegen_->LiteralDoubleAddress(
3635 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003636 } else {
3637 DCHECK(second.IsDoubleStackSlot());
3638 __ divsd(first.AsFpuRegister<XmmRegister>(),
3639 Address(CpuRegister(RSP), second.GetStackIndex()));
3640 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003641 break;
3642 }
3643
3644 default:
3645 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3646 }
3647}
3648
Calin Juravlebacfec32014-11-14 15:54:36 +00003649void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003650 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003651 LocationSummary* locations =
3652 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003653
3654 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003655 case Primitive::kPrimInt:
3656 case Primitive::kPrimLong: {
3657 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003658 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003659 // Intel uses rdx:rax (edx:eax for int values) as the dividend and puts the remainder in rdx.
3660 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003661 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3662 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3663 // output and request another temp.
3664 if (rem->InputAt(1)->IsConstant()) {
3665 locations->AddTemp(Location::RequiresRegister());
3666 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003667 break;
3668 }
3669
3670 case Primitive::kPrimFloat:
3671 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003672 locations->SetInAt(0, Location::Any());
3673 locations->SetInAt(1, Location::Any());
3674 locations->SetOut(Location::RequiresFpuRegister());
3675 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003676 break;
3677 }
3678
3679 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003680 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003681 }
3682}
3683
3684void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3685 Primitive::Type type = rem->GetResultType();
3686 switch (type) {
3687 case Primitive::kPrimInt:
3688 case Primitive::kPrimLong: {
3689 GenerateDivRemIntegral(rem);
3690 break;
3691 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003692 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003693 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003694 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003695 break;
3696 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003697 default:
3698 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3699 }
3700}
3701
Calin Juravled0d48522014-11-04 16:40:20 +00003702void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003703 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3704 ? LocationSummary::kCallOnSlowPath
3705 : LocationSummary::kNoCall;
3706 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003707 locations->SetInAt(0, Location::Any());
3708 if (instruction->HasUses()) {
3709 locations->SetOut(Location::SameAsFirstInput());
3710 }
3711}
3712
3713void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003714 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003715 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3716 codegen_->AddSlowPath(slow_path);
3717
3718 LocationSummary* locations = instruction->GetLocations();
3719 Location value = locations->InAt(0);
3720
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003721 switch (instruction->GetType()) {
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003722 case Primitive::kPrimByte:
3723 case Primitive::kPrimChar:
3724 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003725 case Primitive::kPrimInt: {
3726 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003727 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003728 __ j(kEqual, slow_path->GetEntryLabel());
3729 } else if (value.IsStackSlot()) {
3730 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3731 __ j(kEqual, slow_path->GetEntryLabel());
3732 } else {
3733 DCHECK(value.IsConstant()) << value;
3734 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3735 __ jmp(slow_path->GetEntryLabel());
3736 }
3737 }
3738 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003739 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003740 case Primitive::kPrimLong: {
3741 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003742 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003743 __ j(kEqual, slow_path->GetEntryLabel());
3744 } else if (value.IsDoubleStackSlot()) {
3745 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3746 __ j(kEqual, slow_path->GetEntryLabel());
3747 } else {
3748 DCHECK(value.IsConstant()) << value;
3749 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3750 __ jmp(slow_path->GetEntryLabel());
3751 }
3752 }
3753 break;
3754 }
3755 default:
3756 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003757 }
Calin Juravled0d48522014-11-04 16:40:20 +00003758}
3759
Calin Juravle9aec02f2014-11-18 23:06:35 +00003760void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3761 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3762
3763 LocationSummary* locations =
3764 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3765
3766 switch (op->GetResultType()) {
3767 case Primitive::kPrimInt:
3768 case Primitive::kPrimLong: {
3769 locations->SetInAt(0, Location::RequiresRegister());
3770 // The shift count needs to be in CL.
3771 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3772 locations->SetOut(Location::SameAsFirstInput());
3773 break;
3774 }
3775 default:
3776 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3777 }
3778}
3779
3780void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3781 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3782
3783 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003784 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003785 Location second = locations->InAt(1);
3786
3787 switch (op->GetResultType()) {
3788 case Primitive::kPrimInt: {
3789 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003790 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003791 if (op->IsShl()) {
3792 __ shll(first_reg, second_reg);
3793 } else if (op->IsShr()) {
3794 __ sarl(first_reg, second_reg);
3795 } else {
3796 __ shrl(first_reg, second_reg);
3797 }
3798 } else {
Nicolas Geoffray486cc192014-12-08 18:00:55 +00003799 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003800 if (op->IsShl()) {
3801 __ shll(first_reg, imm);
3802 } else if (op->IsShr()) {
3803 __ sarl(first_reg, imm);
3804 } else {
3805 __ shrl(first_reg, imm);
3806 }
3807 }
3808 break;
3809 }
3810 case Primitive::kPrimLong: {
3811 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003812 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003813 if (op->IsShl()) {
3814 __ shlq(first_reg, second_reg);
3815 } else if (op->IsShr()) {
3816 __ sarq(first_reg, second_reg);
3817 } else {
3818 __ shrq(first_reg, second_reg);
3819 }
3820 } else {
Nicolas Geoffray486cc192014-12-08 18:00:55 +00003821 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftValue);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003822 if (op->IsShl()) {
3823 __ shlq(first_reg, imm);
3824 } else if (op->IsShr()) {
3825 __ sarq(first_reg, imm);
3826 } else {
3827 __ shrq(first_reg, imm);
3828 }
3829 }
3830 break;
3831 }
3832 default:
3833 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003834 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003835 }
3836}
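// Illustrative note (not generated code; values chosen only for the example):
// constant shift amounts are masked with kMaxIntShiftValue (31) or
// kMaxLongShiftValue (63) above, which matches both the Java shift rules and what
// the hardware does with a count in CL. For instance an int shift by 35 behaves as
// a shift by 35 & 31 == 3, so (1 << 35) evaluated on an int operand yields 8.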
3837
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003838void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3839 LocationSummary* locations =
3840 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3841
3842 switch (ror->GetResultType()) {
3843 case Primitive::kPrimInt:
3844 case Primitive::kPrimLong: {
3845 locations->SetInAt(0, Location::RequiresRegister());
3846 // The shift count needs to be in CL (unless it is a constant).
3847 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3848 locations->SetOut(Location::SameAsFirstInput());
3849 break;
3850 }
3851 default:
3852 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3853 UNREACHABLE();
3854 }
3855}
3856
3857void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3858 LocationSummary* locations = ror->GetLocations();
3859 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3860 Location second = locations->InAt(1);
3861
3862 switch (ror->GetResultType()) {
3863 case Primitive::kPrimInt:
3864 if (second.IsRegister()) {
3865 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3866 __ rorl(first_reg, second_reg);
3867 } else {
3868 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
3869 __ rorl(first_reg, imm);
3870 }
3871 break;
3872 case Primitive::kPrimLong:
3873 if (second.IsRegister()) {
3874 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3875 __ rorq(first_reg, second_reg);
3876 } else {
3877 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftValue);
3878 __ rorq(first_reg, imm);
3879 }
3880 break;
3881 default:
3882 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3883 UNREACHABLE();
3884 }
3885}
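// Illustrative note (not generated code): rorl/rorq rotate towards the least
// significant bit, with bits shifted out re-entering at the top. For example,
// rotating the int value 0x00000001 right by 1 gives 0x80000000, and a rotation
// amount of 32 is masked to 0, leaving the value unchanged.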
3886
Calin Juravle9aec02f2014-11-18 23:06:35 +00003887void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3888 HandleShift(shl);
3889}
3890
3891void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3892 HandleShift(shl);
3893}
3894
3895void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3896 HandleShift(shr);
3897}
3898
3899void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3900 HandleShift(shr);
3901}
3902
3903void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3904 HandleShift(ushr);
3905}
3906
3907void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3908 HandleShift(ushr);
3909}
3910
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003911void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003912 LocationSummary* locations =
3913 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003914 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003915 if (instruction->IsStringAlloc()) {
3916 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3917 } else {
3918 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3919 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3920 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003921 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003922}
3923
3924void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003925 // Note: if heap poisoning is enabled, the entry point takes care
3926 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003927 if (instruction->IsStringAlloc()) {
3928 // String is allocated through StringFactory. Call NewEmptyString entry point.
3929 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3930 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3931 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3932 __ call(Address(temp, code_offset.SizeValue()));
3933 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3934 } else {
3935 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3936 instruction,
3937 instruction->GetDexPc(),
3938 nullptr);
3939 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3940 DCHECK(!codegen_->IsLeafMethod());
3941 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003942}
3943
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003944void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3945 LocationSummary* locations =
3946 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3947 InvokeRuntimeCallingConvention calling_convention;
3948 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003949 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003950 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003951 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003952}
3953
3954void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3955 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003956 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3957 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003958 // Note: if heap poisoning is enabled, the entry point takes care
3959 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003960 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3961 instruction,
3962 instruction->GetDexPc(),
3963 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003964 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003965
3966 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003967}
3968
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003969void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003970 LocationSummary* locations =
3971 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003972 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3973 if (location.IsStackSlot()) {
3974 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3975 } else if (location.IsDoubleStackSlot()) {
3976 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3977 }
3978 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003979}
3980
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003981void InstructionCodeGeneratorX86_64::VisitParameterValue(
3982 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003983 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003984}
3985
3986void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3987 LocationSummary* locations =
3988 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3989 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3990}
3991
3992void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
3993 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3994 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003995}
3996
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003997void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3998 LocationSummary* locations =
3999 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4000 locations->SetInAt(0, Location::RequiresRegister());
4001 locations->SetOut(Location::RequiresRegister());
4002}
4003
4004void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4005 LocationSummary* locations = instruction->GetLocations();
4006 uint32_t method_offset = 0;
4007 if (instruction->GetTableKind() == HClassTableGet::kVTable) {
4008 method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4009 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
4010 } else {
4011 method_offset = mirror::Class::EmbeddedImTableEntryOffset(
4012 instruction->GetIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
4013 }
4014 __ movq(locations->Out().AsRegister<CpuRegister>(),
4015 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
4016}
4017
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004018void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004019 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004020 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004021 locations->SetInAt(0, Location::RequiresRegister());
4022 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004023}
4024
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004025void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4026 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004027 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4028 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004029 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004030 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004031 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004032 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004033 break;
4034
4035 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004036 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004037 break;
4038
4039 default:
4040 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4041 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004042}
4043
David Brazdil66d126e2015-04-03 16:02:44 +01004044void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4045 LocationSummary* locations =
4046 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4047 locations->SetInAt(0, Location::RequiresRegister());
4048 locations->SetOut(Location::SameAsFirstInput());
4049}
4050
4051void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004052 LocationSummary* locations = bool_not->GetLocations();
4053 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4054 locations->Out().AsRegister<CpuRegister>().AsRegister());
4055 Location out = locations->Out();
4056 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4057}
4058
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004059void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004060 LocationSummary* locations =
4061 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004062 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
4063 locations->SetInAt(i, Location::Any());
4064 }
4065 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004066}
4067
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004068void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004069 LOG(FATAL) << "Unimplemented";
4070}
4071
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004072void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004073 /*
4074 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004075 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004076 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4077 */
4078 switch (kind) {
4079 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004080 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004081 break;
4082 }
4083 case MemBarrierKind::kAnyStore:
4084 case MemBarrierKind::kLoadAny:
4085 case MemBarrierKind::kStoreStore: {
4086 // nop
4087 break;
4088 }
4089 default:
4090 LOG(FATAL) << "Unexpected memory barrier " << kind;
4091 }
4092}
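// Illustrative note (a sketch of how the barrier kinds above are used elsewhere in
// this file, not generated code): on x86-64 only kAnyAny emits an actual fence
// (via MemoryFence()). A volatile load is followed by a kLoadAny barrier, and a
// volatile store is bracketed by kAnyStore before and kAnyAny after (see
// HandleFieldGet/HandleFieldSet below), so only the store side costs a real
// instruction; the other kinds merely constrain instruction scheduling.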
4093
4094void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4095 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4096
Roland Levillain0d5a2812015-11-13 10:07:31 +00004097 bool object_field_get_with_read_barrier =
4098 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004099 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004100 new (GetGraph()->GetArena()) LocationSummary(instruction,
4101 object_field_get_with_read_barrier ?
4102 LocationSummary::kCallOnSlowPath :
4103 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004104 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004105 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4106 locations->SetOut(Location::RequiresFpuRegister());
4107 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004108 // The output overlaps for an object field get when read barriers
4109 // are enabled: we do not want the move to overwrite the object's
4110 // location, as we need it to emit the read barrier.
4111 locations->SetOut(
4112 Location::RequiresRegister(),
4113 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004114 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004115 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4116 // We need a temporary register for the read barrier marking slow
4117 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4118 locations->AddTemp(Location::RequiresRegister());
4119 }
Calin Juravle52c48962014-12-16 17:02:57 +00004120}
4121
4122void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4123 const FieldInfo& field_info) {
4124 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4125
4126 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004127 Location base_loc = locations->InAt(0);
4128 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004129 Location out = locations->Out();
4130 bool is_volatile = field_info.IsVolatile();
4131 Primitive::Type field_type = field_info.GetFieldType();
4132 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4133
4134 switch (field_type) {
4135 case Primitive::kPrimBoolean: {
4136 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4137 break;
4138 }
4139
4140 case Primitive::kPrimByte: {
4141 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4142 break;
4143 }
4144
4145 case Primitive::kPrimShort: {
4146 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4147 break;
4148 }
4149
4150 case Primitive::kPrimChar: {
4151 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4152 break;
4153 }
4154
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004155 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004156 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4157 break;
4158 }
4159
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004160 case Primitive::kPrimNot: {
4161 // /* HeapReference<Object> */ out = *(base + offset)
4162 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4163 Location temp_loc = locations->GetTemp(0);
4164 // Note that a potential implicit null check is handled in this
4165 // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4166 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4167 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4168 if (is_volatile) {
4169 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4170 }
4171 } else {
4172 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4173 codegen_->MaybeRecordImplicitNullCheck(instruction);
4174 if (is_volatile) {
4175 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4176 }
4177 // If read barriers are enabled, emit read barriers other than
4178 // Baker's using a slow path (and also unpoison the loaded
4179 // reference, if heap poisoning is enabled).
4180 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4181 }
4182 break;
4183 }
4184
Calin Juravle52c48962014-12-16 17:02:57 +00004185 case Primitive::kPrimLong: {
4186 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4187 break;
4188 }
4189
4190 case Primitive::kPrimFloat: {
4191 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4192 break;
4193 }
4194
4195 case Primitive::kPrimDouble: {
4196 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4197 break;
4198 }
4199
4200 case Primitive::kPrimVoid:
4201 LOG(FATAL) << "Unreachable type " << field_type;
4202 UNREACHABLE();
4203 }
4204
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004205 if (field_type == Primitive::kPrimNot) {
4206 // Potential implicit null checks, in the case of reference
4207 // fields, are handled in the previous switch statement.
4208 } else {
4209 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004210 }
Roland Levillain4d027112015-07-01 15:41:14 +01004211
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004212 if (is_volatile) {
4213 if (field_type == Primitive::kPrimNot) {
4214 // Memory barriers, in the case of references, are also handled
4215 // in the previous switch statement.
4216 } else {
4217 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4218 }
Roland Levillain4d027112015-07-01 15:41:14 +01004219 }
Calin Juravle52c48962014-12-16 17:02:57 +00004220}
4221
4222void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4223 const FieldInfo& field_info) {
4224 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4225
4226 LocationSummary* locations =
4227 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004228 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004229 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004230 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004231 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004232
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004233 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004234 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004235 if (is_volatile) {
4236 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4237 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4238 } else {
4239 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4240 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004241 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004242 if (is_volatile) {
4243 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4244 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4245 } else {
4246 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4247 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004248 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004249 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004250 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004251 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004252 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004253 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4254 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004255 locations->AddTemp(Location::RequiresRegister());
4256 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004257}
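// Illustrative note (a sketch, not generated code): the Int32Constant restriction
// for volatile stores above exists because a 64-bit constant that does not fit in a
// sign-extended imm32 would otherwise be written as two 32-bit stores (see
// MoveInt64ToAddress), which another thread could observe half-written. Restricting
// the operand to a register or an imm32 keeps the volatile write a single store
// instruction.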
4258
Calin Juravle52c48962014-12-16 17:02:57 +00004259void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004260 const FieldInfo& field_info,
4261 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004262 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4263
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004264 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004265 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4266 Location value = locations->InAt(1);
4267 bool is_volatile = field_info.IsVolatile();
4268 Primitive::Type field_type = field_info.GetFieldType();
4269 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4270
4271 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004272 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004273 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004274
Mark Mendellea5af682015-10-22 17:35:49 -04004275 bool maybe_record_implicit_null_check_done = false;
4276
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004277 switch (field_type) {
4278 case Primitive::kPrimBoolean:
4279 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004280 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004281 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004282 __ movb(Address(base, offset), Immediate(v));
4283 } else {
4284 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4285 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004286 break;
4287 }
4288
4289 case Primitive::kPrimShort:
4290 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004291 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004292 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004293 __ movw(Address(base, offset), Immediate(v));
4294 } else {
4295 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4296 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004297 break;
4298 }
4299
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004300 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004301 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004302 if (value.IsConstant()) {
4303 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004304 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4305 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4306 // Note: if heap poisoning is enabled, no need to poison
4307 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004308 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004309 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004310 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4311 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4312 __ movl(temp, value.AsRegister<CpuRegister>());
4313 __ PoisonHeapReference(temp);
4314 __ movl(Address(base, offset), temp);
4315 } else {
4316 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4317 }
Mark Mendell40741f32015-04-20 22:10:34 -04004318 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004319 break;
4320 }
4321
4322 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004323 if (value.IsConstant()) {
4324 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004325 codegen_->MoveInt64ToAddress(Address(base, offset),
4326 Address(base, offset + sizeof(int32_t)),
4327 v,
4328 instruction);
4329 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004330 } else {
4331 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4332 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004333 break;
4334 }
4335
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004336 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004337 if (value.IsConstant()) {
4338 int32_t v =
4339 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4340 __ movl(Address(base, offset), Immediate(v));
4341 } else {
4342 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4343 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004344 break;
4345 }
4346
4347 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004348 if (value.IsConstant()) {
4349 int64_t v =
4350 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4351 codegen_->MoveInt64ToAddress(Address(base, offset),
4352 Address(base, offset + sizeof(int32_t)),
4353 v,
4354 instruction);
4355 maybe_record_implicit_null_check_done = true;
4356 } else {
4357 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4358 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004359 break;
4360 }
4361
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004362 case Primitive::kPrimVoid:
4363 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004364 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004365 }
Calin Juravle52c48962014-12-16 17:02:57 +00004366
Mark Mendellea5af682015-10-22 17:35:49 -04004367 if (!maybe_record_implicit_null_check_done) {
4368 codegen_->MaybeRecordImplicitNullCheck(instruction);
4369 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004370
4371 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4372 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4373 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004374 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004375 }
4376
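  // Note (added comment): a volatile store must be followed by a StoreLoad barrier,
  // which is what the kAnyAny barrier below provides. The remaining orderings
  // (LoadLoad, LoadStore, StoreStore) are already guaranteed by the x86-64 memory model.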
Calin Juravle52c48962014-12-16 17:02:57 +00004377 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004378 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004379 }
4380}
4381
4382void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4383 HandleFieldSet(instruction, instruction->GetFieldInfo());
4384}
4385
4386void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004387 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004388}
4389
4390void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004391 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004392}
4393
4394void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004395 HandleFieldGet(instruction, instruction->GetFieldInfo());
4396}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004397
Calin Juravle52c48962014-12-16 17:02:57 +00004398void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4399 HandleFieldGet(instruction);
4400}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004401
Calin Juravle52c48962014-12-16 17:02:57 +00004402void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4403 HandleFieldGet(instruction, instruction->GetFieldInfo());
4404}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004405
Calin Juravle52c48962014-12-16 17:02:57 +00004406void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4407 HandleFieldSet(instruction, instruction->GetFieldInfo());
4408}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004409
Calin Juravle52c48962014-12-16 17:02:57 +00004410void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004411 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004412}
4413
Calin Juravlee460d1d2015-09-29 04:52:17 +01004414void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4415 HUnresolvedInstanceFieldGet* instruction) {
4416 FieldAccessCallingConventionX86_64 calling_convention;
4417 codegen_->CreateUnresolvedFieldLocationSummary(
4418 instruction, instruction->GetFieldType(), calling_convention);
4419}
4420
4421void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4422 HUnresolvedInstanceFieldGet* instruction) {
4423 FieldAccessCallingConventionX86_64 calling_convention;
4424 codegen_->GenerateUnresolvedFieldAccess(instruction,
4425 instruction->GetFieldType(),
4426 instruction->GetFieldIndex(),
4427 instruction->GetDexPc(),
4428 calling_convention);
4429}
4430
4431void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4432 HUnresolvedInstanceFieldSet* instruction) {
4433 FieldAccessCallingConventionX86_64 calling_convention;
4434 codegen_->CreateUnresolvedFieldLocationSummary(
4435 instruction, instruction->GetFieldType(), calling_convention);
4436}
4437
4438void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4439 HUnresolvedInstanceFieldSet* instruction) {
4440 FieldAccessCallingConventionX86_64 calling_convention;
4441 codegen_->GenerateUnresolvedFieldAccess(instruction,
4442 instruction->GetFieldType(),
4443 instruction->GetFieldIndex(),
4444 instruction->GetDexPc(),
4445 calling_convention);
4446}
4447
4448void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4449 HUnresolvedStaticFieldGet* instruction) {
4450 FieldAccessCallingConventionX86_64 calling_convention;
4451 codegen_->CreateUnresolvedFieldLocationSummary(
4452 instruction, instruction->GetFieldType(), calling_convention);
4453}
4454
4455void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4456 HUnresolvedStaticFieldGet* instruction) {
4457 FieldAccessCallingConventionX86_64 calling_convention;
4458 codegen_->GenerateUnresolvedFieldAccess(instruction,
4459 instruction->GetFieldType(),
4460 instruction->GetFieldIndex(),
4461 instruction->GetDexPc(),
4462 calling_convention);
4463}
4464
4465void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4466 HUnresolvedStaticFieldSet* instruction) {
4467 FieldAccessCallingConventionX86_64 calling_convention;
4468 codegen_->CreateUnresolvedFieldLocationSummary(
4469 instruction, instruction->GetFieldType(), calling_convention);
4470}
4471
4472void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4473 HUnresolvedStaticFieldSet* instruction) {
4474 FieldAccessCallingConventionX86_64 calling_convention;
4475 codegen_->GenerateUnresolvedFieldAccess(instruction,
4476 instruction->GetFieldType(),
4477 instruction->GetFieldIndex(),
4478 instruction->GetDexPc(),
4479 calling_convention);
4480}
4481
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004482void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004483 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4484 ? LocationSummary::kCallOnSlowPath
4485 : LocationSummary::kNoCall;
4486 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4487 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004488 ? Location::RequiresRegister()
4489 : Location::Any();
4490 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004491 if (instruction->HasUses()) {
4492 locations->SetOut(Location::SameAsFirstInput());
4493 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004494}
4495
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004496void InstructionCodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004497 if (codegen_->CanMoveNullCheckToUser(instruction)) {
4498 return;
4499 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004500 LocationSummary* locations = instruction->GetLocations();
4501 Location obj = locations->InAt(0);
4502
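  // Note (added comment): the result of the testl below is discarded; the instruction
  // only dereferences `obj` so that a null reference faults. The fault handler uses the
  // PC recorded below to turn the fault into a NullPointerException. RAX is merely a
  // throwaway destination register.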
4503 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
4504 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4505}
4506
4507void InstructionCodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004508 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004509 codegen_->AddSlowPath(slow_path);
4510
4511 LocationSummary* locations = instruction->GetLocations();
4512 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004513
4514 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004515 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004516 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004517 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004518 } else {
4519 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004520 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004521 __ jmp(slow_path->GetEntryLabel());
4522 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004523 }
4524 __ j(kEqual, slow_path->GetEntryLabel());
4525}
4526
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004527void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004528 if (codegen_->IsImplicitNullCheckAllowed(instruction)) {
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004529 GenerateImplicitNullCheck(instruction);
4530 } else {
4531 GenerateExplicitNullCheck(instruction);
4532 }
4533}
4534
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004535void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004536 bool object_array_get_with_read_barrier =
4537 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004538 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004539 new (GetGraph()->GetArena()) LocationSummary(instruction,
4540 object_array_get_with_read_barrier ?
4541 LocationSummary::kCallOnSlowPath :
4542 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004543 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004544 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004545 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4546 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4547 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004548 // The output overlaps for an object array get when read barriers
4549 // are enabled: we do not want the move to overwrite the array's
4550 // location, as we need it to emit the read barrier.
4551 locations->SetOut(
4552 Location::RequiresRegister(),
4553 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004554 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004555 // We need a temporary register for the read barrier marking slow
4556 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4557 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4558 locations->AddTemp(Location::RequiresRegister());
4559 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004560}
4561
4562void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4563 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004564 Location obj_loc = locations->InAt(0);
4565 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004566 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004567 Location out_loc = locations->Out();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004568
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004569 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004570 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004571 case Primitive::kPrimBoolean: {
4572 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004573 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004574 if (index.IsConstant()) {
4575 __ movzxb(out, Address(obj,
4576 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4577 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004578 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004579 }
4580 break;
4581 }
4582
4583 case Primitive::kPrimByte: {
4584 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004585 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004586 if (index.IsConstant()) {
4587 __ movsxb(out, Address(obj,
4588 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4589 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004590 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004591 }
4592 break;
4593 }
4594
4595 case Primitive::kPrimShort: {
4596 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004597 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004598 if (index.IsConstant()) {
4599 __ movsxw(out, Address(obj,
4600 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4601 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004602 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004603 }
4604 break;
4605 }
4606
4607 case Primitive::kPrimChar: {
4608 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004609 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004610 if (index.IsConstant()) {
4611 __ movzxw(out, Address(obj,
4612 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4613 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004614 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004615 }
4616 break;
4617 }
4618
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004619 case Primitive::kPrimInt: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004620 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004621 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004622 if (index.IsConstant()) {
4623 __ movl(out, Address(obj,
4624 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4625 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004626 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004627 }
4628 break;
4629 }
4630
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004631 case Primitive::kPrimNot: {
4632 static_assert(
4633 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4634 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4635 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4636 // /* HeapReference<Object> */ out =
4637 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4638 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4639 Location temp = locations->GetTemp(0);
4640 // Note that a potential implicit null check is handled in this
4641 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4642 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4643 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4644 } else {
4645 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4646 if (index.IsConstant()) {
4647 uint32_t offset =
4648 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4649 __ movl(out, Address(obj, offset));
4650 codegen_->MaybeRecordImplicitNullCheck(instruction);
4651 // If read barriers are enabled, emit read barriers other than
4652 // Baker's using a slow path (and also unpoison the loaded
4653 // reference, if heap poisoning is enabled).
4654 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4655 } else {
4656 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4657 codegen_->MaybeRecordImplicitNullCheck(instruction);
4658 // If read barriers are enabled, emit read barriers other than
4659 // Baker's using a slow path (and also unpoison the loaded
4660 // reference, if heap poisoning is enabled).
4661 codegen_->MaybeGenerateReadBarrierSlow(
4662 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4663 }
4664 }
4665 break;
4666 }
4667
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004668 case Primitive::kPrimLong: {
4669 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004670 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004671 if (index.IsConstant()) {
4672 __ movq(out, Address(obj,
4673 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4674 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004675 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004676 }
4677 break;
4678 }
4679
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004680 case Primitive::kPrimFloat: {
4681 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004682 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004683 if (index.IsConstant()) {
4684 __ movss(out, Address(obj,
4685 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4686 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004687 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004688 }
4689 break;
4690 }
4691
4692 case Primitive::kPrimDouble: {
4693 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004694 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004695 if (index.IsConstant()) {
4696 __ movsd(out, Address(obj,
4697 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4698 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004699 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004700 }
4701 break;
4702 }
4703
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004704 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004705 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004706 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004707 }
Roland Levillain4d027112015-07-01 15:41:14 +01004708
4709 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004710 // Potential implicit null checks, in the case of reference
4711 // arrays, are handled in the previous switch statement.
4712 } else {
4713 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004714 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004715}
4716
4717void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004718 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004719
4720 bool needs_write_barrier =
4721 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004722 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004723 bool object_array_set_with_read_barrier =
4724 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004725
Nicolas Geoffray39468442014-09-02 15:17:15 +01004726 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004727 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004728 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004729 LocationSummary::kCallOnSlowPath :
4730 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004731
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004732 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004733 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4734 if (Primitive::IsFloatingPointType(value_type)) {
4735 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004736 } else {
4737 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4738 }
4739
4740 if (needs_write_barrier) {
4741 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004742
4743 // This first temporary register is possibly used for heap
4744 // reference poisoning and/or read barrier emission too.
4745 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004746 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004747 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004748}
4749
4750void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4751 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004752 Location array_loc = locations->InAt(0);
4753 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004754 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004755 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004756 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004757 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004758 bool needs_write_barrier =
4759 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004760 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4761 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4762 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004763
4764 switch (value_type) {
4765 case Primitive::kPrimBoolean:
4766 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004767 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4768 Address address = index.IsConstant()
4769 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4770 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4771 if (value.IsRegister()) {
4772 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004773 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004774 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004775 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004776 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004777 break;
4778 }
4779
4780 case Primitive::kPrimShort:
4781 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004782 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4783 Address address = index.IsConstant()
4784 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4785 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4786 if (value.IsRegister()) {
4787 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004788 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004789 DCHECK(value.IsConstant()) << value;
4790 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004791 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004792 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004793 break;
4794 }
4795
4796 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004797 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4798 Address address = index.IsConstant()
4799 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4800 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004801
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004802 if (!value.IsRegister()) {
4803 // Just setting null.
4804 DCHECK(instruction->InputAt(2)->IsNullConstant());
4805 DCHECK(value.IsConstant()) << value;
4806 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004807 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004808 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004809 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004810 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004811 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004812
4813 DCHECK(needs_write_barrier);
4814 CpuRegister register_value = value.AsRegister<CpuRegister>();
4815 NearLabel done, not_null, do_put;
4816 SlowPathCode* slow_path = nullptr;
4817 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004818 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004819 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4820 codegen_->AddSlowPath(slow_path);
4821 if (instruction->GetValueCanBeNull()) {
4822 __ testl(register_value, register_value);
4823 __ j(kNotEqual, &not_null);
4824 __ movl(address, Immediate(0));
4825 codegen_->MaybeRecordImplicitNullCheck(instruction);
4826 __ jmp(&done);
4827 __ Bind(&not_null);
4828 }
4829
Roland Levillain0d5a2812015-11-13 10:07:31 +00004830 if (kEmitCompilerReadBarrier) {
4831 // When read barriers are enabled, the type checking
4832 // instrumentation requires two read barriers:
4833 //
4834 // __ movl(temp2, temp);
4835 // // /* HeapReference<Class> */ temp = temp->component_type_
4836 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004837 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004838 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4839 //
4840 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4841 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004842 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004843 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4844 //
4845 // __ cmpl(temp, temp2);
4846 //
4847 // However, the second read barrier may trash `temp`, as it
4848 // is a temporary register, and as such would not be saved
4849 // along with live registers before calling the runtime (nor
4850 // restored afterwards). So in this case, we bail out and
4851 // delegate the work to the array set slow path.
4852 //
4853 // TODO: Extend the register allocator to support a new
4854 // "(locally) live temp" location so as to avoid always
4855 // going into the slow path when read barriers are enabled.
4856 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004857 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004858 // /* HeapReference<Class> */ temp = array->klass_
4859 __ movl(temp, Address(array, class_offset));
4860 codegen_->MaybeRecordImplicitNullCheck(instruction);
4861 __ MaybeUnpoisonHeapReference(temp);
4862
4863 // /* HeapReference<Class> */ temp = temp->component_type_
4864 __ movl(temp, Address(temp, component_offset));
4865 // If heap poisoning is enabled, no need to unpoison `temp`
4866 // nor the object reference in `register_value->klass_`, as
4867 // we are comparing two poisoned references.
4868 __ cmpl(temp, Address(register_value, class_offset));
4869
4870 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4871 __ j(kEqual, &do_put);
4872 // If heap poisoning is enabled, the `temp` reference has
4873 // not been unpoisoned yet; unpoison it now.
4874 __ MaybeUnpoisonHeapReference(temp);
4875
4876 // /* HeapReference<Class> */ temp = temp->super_class_
4877 __ movl(temp, Address(temp, super_offset));
4878 // If heap poisoning is enabled, no need to unpoison
4879 // `temp`, as we are comparing against null below.
4880 __ testl(temp, temp);
4881 __ j(kNotEqual, slow_path->GetEntryLabel());
4882 __ Bind(&do_put);
4883 } else {
4884 __ j(kNotEqual, slow_path->GetEntryLabel());
4885 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004886 }
4887 }
4888
4889 if (kPoisonHeapReferences) {
4890 __ movl(temp, register_value);
4891 __ PoisonHeapReference(temp);
4892 __ movl(address, temp);
4893 } else {
4894 __ movl(address, register_value);
4895 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004896 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004897 codegen_->MaybeRecordImplicitNullCheck(instruction);
4898 }
4899
4900 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4901 codegen_->MarkGCCard(
4902 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4903 __ Bind(&done);
4904
4905 if (slow_path != nullptr) {
4906 __ Bind(slow_path->GetExitLabel());
4907 }
4908
4909 break;
4910 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004911
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004912 case Primitive::kPrimInt: {
4913 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4914 Address address = index.IsConstant()
4915 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4916 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4917 if (value.IsRegister()) {
4918 __ movl(address, value.AsRegister<CpuRegister>());
4919 } else {
4920 DCHECK(value.IsConstant()) << value;
4921 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4922 __ movl(address, Immediate(v));
4923 }
4924 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004925 break;
4926 }
4927
4928 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004929 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4930 Address address = index.IsConstant()
4931 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4932 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4933 if (value.IsRegister()) {
4934 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004935 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004936 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004937 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004938 Address address_high = index.IsConstant()
4939 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4940 offset + sizeof(int32_t))
4941 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4942 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004943 }
4944 break;
4945 }
4946
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004947 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004948 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4949 Address address = index.IsConstant()
4950 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4951 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004952 if (value.IsFpuRegister()) {
4953 __ movss(address, value.AsFpuRegister<XmmRegister>());
4954 } else {
4955 DCHECK(value.IsConstant());
4956 int32_t v =
4957 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4958 __ movl(address, Immediate(v));
4959 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004960 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004961 break;
4962 }
4963
4964 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004965 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4966 Address address = index.IsConstant()
4967 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4968 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004969 if (value.IsFpuRegister()) {
4970 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4971 codegen_->MaybeRecordImplicitNullCheck(instruction);
4972 } else {
4973 int64_t v =
4974 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4975 Address address_high = index.IsConstant()
4976 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4977 offset + sizeof(int32_t))
4978 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4979 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4980 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004981 break;
4982 }
4983
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004984 case Primitive::kPrimVoid:
4985 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004986 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004987 }
4988}
4989
4990void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004991 LocationSummary* locations =
4992 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004993 locations->SetInAt(0, Location::RequiresRegister());
4994 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004995}
4996
4997void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
4998 LocationSummary* locations = instruction->GetLocations();
4999 uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
Roland Levillain271ab9c2014-11-27 15:23:57 +00005000 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5001 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005002 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005003 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005004}
5005
5006void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00005007 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
5008 ? LocationSummary::kCallOnSlowPath
5009 : LocationSummary::kNoCall;
5010 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005011 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04005012 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005013 if (instruction->HasUses()) {
5014 locations->SetOut(Location::SameAsFirstInput());
5015 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005016}
5017
5018void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5019 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005020 Location index_loc = locations->InAt(0);
5021 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07005022 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005023 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005024
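  // Note (added comment): the comparisons below are unsigned (kAboveEqual / kBelowEqual).
  // A negative index, interpreted as unsigned, is larger than any valid length, so a
  // single comparison catches both `index < 0` and `index >= length`.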
Mark Mendell99dbd682015-04-22 16:18:52 -04005025 if (length_loc.IsConstant()) {
5026 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5027 if (index_loc.IsConstant()) {
5028 // BCE will remove the bounds check if we are guaranteed to pass.
5029 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5030 if (index < 0 || index >= length) {
5031 codegen_->AddSlowPath(slow_path);
5032 __ jmp(slow_path->GetEntryLabel());
5033 } else {
5034 // Some optimization after BCE may have generated this; the index is
5035 // known to be in range, so no bounds check is needed.
5036 }
5037 return;
5038 }
5039
5040 // We have to reverse the jump condition because the length is the constant.
5041 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5042 __ cmpl(index_reg, Immediate(length));
5043 codegen_->AddSlowPath(slow_path);
5044 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005045 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04005046 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5047 if (index_loc.IsConstant()) {
5048 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5049 __ cmpl(length, Immediate(value));
5050 } else {
5051 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5052 }
5053 codegen_->AddSlowPath(slow_path);
5054 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005055 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005056}
5057
5058void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5059 CpuRegister card,
5060 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005061 CpuRegister value,
5062 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005063 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005064 if (value_can_be_null) {
5065 __ testl(value, value);
5066 __ j(kEqual, &is_null);
5067 }
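  // Note (added comment): the card to dirty lives at card_table_base + (object >> kCardShift).
  // The byte stored is the low byte of `card` (the biased card table base); the base is
  // biased at creation (see gc::accounting::CardTable::Create) so that this byte equals
  // kCardDirty, which avoids materializing a separate constant.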
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005068 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5069 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005070 __ movq(temp, object);
5071 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005072 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005073 if (value_can_be_null) {
5074 __ Bind(&is_null);
5075 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005076}
5077
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005078void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005079 LOG(FATAL) << "Unimplemented";
5080}
5081
5082void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005083 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5084}
5085
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005086void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5087 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5088}
5089
5090void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005091 HBasicBlock* block = instruction->GetBlock();
5092 if (block->GetLoopInformation() != nullptr) {
5093 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5094 // The back edge will generate the suspend check.
5095 return;
5096 }
5097 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5098 // The goto will generate the suspend check.
5099 return;
5100 }
5101 GenerateSuspendCheck(instruction, nullptr);
5102}
5103
5104void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5105 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005106 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005107 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5108 if (slow_path == nullptr) {
5109 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5110 instruction->SetSlowPath(slow_path);
5111 codegen_->AddSlowPath(slow_path);
5112 if (successor != nullptr) {
5113 DCHECK(successor->IsLoopHeader());
5114 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5115 }
5116 } else {
5117 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5118 }
5119
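  // Note (added comment): this tests the 16-bit flags field of the current Thread
  // (reached through the GS segment register); a non-zero value indicates a pending
  // suspend or checkpoint request, which the slow path services.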
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005120 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5121 /* no_rip */ true),
5122 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005123 if (successor == nullptr) {
5124 __ j(kNotEqual, slow_path->GetEntryLabel());
5125 __ Bind(slow_path->GetReturnLabel());
5126 } else {
5127 __ j(kEqual, codegen_->GetLabelOf(successor));
5128 __ jmp(slow_path->GetEntryLabel());
5129 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005130}
5131
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005132X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5133 return codegen_->GetAssembler();
5134}
5135
5136void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005137 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005138 Location source = move->GetSource();
5139 Location destination = move->GetDestination();
5140
5141 if (source.IsRegister()) {
5142 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005143 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005144 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005145 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005146 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005147 } else {
5148 DCHECK(destination.IsDoubleStackSlot());
5149 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005150 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005151 }
5152 } else if (source.IsStackSlot()) {
5153 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005154 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005155 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005156 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005157 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005158 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005159 } else {
5160 DCHECK(destination.IsStackSlot());
5161 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5162 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5163 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005164 } else if (source.IsDoubleStackSlot()) {
5165 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005166 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005167 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005168 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005169 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5170 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005171 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005172 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005173 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5174 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5175 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005176 } else if (source.IsConstant()) {
5177 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005178 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5179 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005180 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005181 if (value == 0) {
5182 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5183 } else {
5184 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5185 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005186 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005187 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005188 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005189 }
5190 } else if (constant->IsLongConstant()) {
5191 int64_t value = constant->AsLongConstant()->GetValue();
5192 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005193 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005194 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005195 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005196 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005197 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005198 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005199 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005200 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005201 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005202 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005203 } else {
5204 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005205 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005206 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5207 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005208 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005209 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005210 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005211 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005212 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005213 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005214 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005215 } else {
5216 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005217 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005218 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005219 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005220 } else if (source.IsFpuRegister()) {
5221 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005222 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005223 } else if (destination.IsStackSlot()) {
5224 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005225 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005226 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005227 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005228 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005229 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005230 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005231 }
5232}
5233
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005234void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005235 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005236 __ movl(Address(CpuRegister(RSP), mem), reg);
5237 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005238}
5239
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005240void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
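  // Note (added comment): a memory-to-memory swap needs two core scratch registers: TMP
  // plus one picked by ScratchRegisterScope. If the scope had to spill a register to get
  // one, RSP has moved by one word, so the RSP-relative offsets below are adjusted by
  // `stack_offset`.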
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005241 ScratchRegisterScope ensure_scratch(
5242 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5243
5244 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5245 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5246 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5247 Address(CpuRegister(RSP), mem2 + stack_offset));
5248 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5249 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5250 CpuRegister(ensure_scratch.GetRegister()));
5251}
5252
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005253void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5254 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5255 __ movq(Address(CpuRegister(RSP), mem), reg);
5256 __ movq(reg, CpuRegister(TMP));
5257}
5258
5259void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5260 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005261 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005262
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005263 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5264 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5265 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5266 Address(CpuRegister(RSP), mem2 + stack_offset));
5267 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5268 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5269 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005270}
5271
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005272void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5273 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5274 __ movss(Address(CpuRegister(RSP), mem), reg);
5275 __ movd(reg, CpuRegister(TMP));
5276}
5277
5278void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5279 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5280 __ movsd(Address(CpuRegister(RSP), mem), reg);
5281 __ movd(reg, CpuRegister(TMP));
5282}
5283
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005284void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005285 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005286 Location source = move->GetSource();
5287 Location destination = move->GetDestination();
5288
5289 if (source.IsRegister() && destination.IsRegister()) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005290 __ xchgq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005291 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005292 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005293 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005294 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005295 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005296 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5297 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005298 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005299 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005300 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005301 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5302 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005303 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005304 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5305 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5306 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005307 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005308 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005309 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005310 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005311 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005312 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005313 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005314 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005315 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005316 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005317 }
5318}
5319
5320
5321void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5322 __ pushq(CpuRegister(reg));
5323}
5324
5325
5326void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5327 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005328}
5329
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005330void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005331 SlowPathCode* slow_path, CpuRegister class_reg) {
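  // Note (added comment): only a status of at least kStatusInitialized guarantees the
  // class is fully initialized; any lower status (uninitialized, initializing on another
  // thread, or an error status) takes the slow path, which initializes the class or throws.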
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005332 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5333 Immediate(mirror::Class::kStatusInitialized));
5334 __ j(kLess, slow_path->GetEntryLabel());
5335 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005336 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005337}
5338
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005339void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01005340 InvokeRuntimeCallingConvention calling_convention;
5341 CodeGenerator::CreateLoadClassLocationSummary(
5342 cls,
5343 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Roland Levillain0d5a2812015-11-13 10:07:31 +00005344 Location::RegisterLocation(RAX),
5345 /* code_generator_supports_read_barrier */ true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005346}
5347
5348void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005349 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005350 if (cls->NeedsAccessCheck()) {
5351 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5352 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5353 cls,
5354 cls->GetDexPc(),
5355 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005356 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005357 return;
5358 }
5359
Roland Levillain0d5a2812015-11-13 10:07:31 +00005360 Location out_loc = locations->Out();
5361 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Calin Juravle580b6092015-10-06 17:35:58 +01005362 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005363
Calin Juravle580b6092015-10-06 17:35:58 +01005364 if (cls->IsReferrersClass()) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005365 DCHECK(!cls->CanCallRuntime());
5366 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005367 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5368 GenerateGcRootFieldLoad(
5369 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005370 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005371 // /* GcRoot<mirror::Class>[] */ out =
5372 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5373 __ movq(out, Address(current_method,
5374 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005375 // /* GcRoot<mirror::Class> */ out = out[type_index]
5376 GenerateGcRootFieldLoad(cls, out_loc, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
Roland Levillain4d027112015-07-01 15:41:14 +01005377
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005378 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
5379 DCHECK(cls->CanCallRuntime());
5380 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5381 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5382 codegen_->AddSlowPath(slow_path);
5383 if (!cls->IsInDexCache()) {
5384 __ testl(out, out);
5385 __ j(kEqual, slow_path->GetEntryLabel());
5386 }
5387 if (cls->MustGenerateClinitCheck()) {
5388 GenerateClassInitializationCheck(slow_path, out);
5389 } else {
5390 __ Bind(slow_path->GetExitLabel());
5391 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005392 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005393 }
5394}
5395
5396void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5397 LocationSummary* locations =
5398 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5399 locations->SetInAt(0, Location::RequiresRegister());
5400 if (check->HasUses()) {
5401 locations->SetOut(Location::SameAsFirstInput());
5402 }
5403}
5404
5405void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005406  // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005407 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005408 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005409 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005410 GenerateClassInitializationCheck(slow_path,
5411 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005412}
5413
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005414void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005415 LocationSummary::CallKind call_kind = (!load->IsInDexCache() || kEmitCompilerReadBarrier)
5416 ? LocationSummary::kCallOnSlowPath
5417 : LocationSummary::kNoCall;
5418 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005419 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005420 locations->SetOut(Location::RequiresRegister());
5421}
5422
5423void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005424 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005425 Location out_loc = locations->Out();
5426 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005427 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005428
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005429 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5430 GenerateGcRootFieldLoad(
5431 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005432 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5433 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005434 // /* GcRoot<mirror::String> */ out = out[string_index]
5435 GenerateGcRootFieldLoad(
5436 load, out_loc, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005437
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005438 if (!load->IsInDexCache()) {
5439 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5440 codegen_->AddSlowPath(slow_path);
5441 __ testl(out, out);
5442 __ j(kEqual, slow_path->GetEntryLabel());
5443 __ Bind(slow_path->GetExitLabel());
5444 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005445}
5446
David Brazdilcb1c0552015-08-04 16:22:25 +01005447static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005448 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5449 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005450}
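// The address above is absolute (no_rip) and is only meaningful together
// with a %gs segment override: the accesses below go through __ gs()->, so
// they read or clear the pending-exception slot of the calling thread.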
5451
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005452void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5453 LocationSummary* locations =
5454 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5455 locations->SetOut(Location::RequiresRegister());
5456}
5457
5458void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005459 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5460}
5461
5462void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5463 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5464}
5465
5466void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5467 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005468}
5469
5470void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5471 LocationSummary* locations =
5472 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5473 InvokeRuntimeCallingConvention calling_convention;
5474 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5475}
5476
5477void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005478 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5479 instruction,
5480 instruction->GetDexPc(),
5481 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005482 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005483}
5484
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005485static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5486 return kEmitCompilerReadBarrier &&
5487 (kUseBakerReadBarrier ||
5488 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5489 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5490 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5491}
5492
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005493void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005494 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005495 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5496 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005497 case TypeCheckKind::kExactCheck:
5498 case TypeCheckKind::kAbstractClassCheck:
5499 case TypeCheckKind::kClassHierarchyCheck:
5500 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005501 call_kind =
5502 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005503 break;
5504 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005505 case TypeCheckKind::kUnresolvedCheck:
5506 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005507 call_kind = LocationSummary::kCallOnSlowPath;
5508 break;
5509 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005510
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005511 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005512 locations->SetInAt(0, Location::RequiresRegister());
5513 locations->SetInAt(1, Location::Any());
5514 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5515 locations->SetOut(Location::RequiresRegister());
5516 // When read barriers are enabled, we need a temporary register for
5517 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005518 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005519 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005520 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005521}
5522
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005523void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005524 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005525 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005526 Location obj_loc = locations->InAt(0);
5527 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005528 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005529 Location out_loc = locations->Out();
5530 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005531 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005532 locations->GetTemp(0) :
5533 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005534 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005535 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5536 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5537 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005538 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005539 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005540
5541 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005542 // Avoid null check if we know obj is not null.
5543 if (instruction->MustDoNullCheck()) {
5544 __ testl(obj, obj);
5545 __ j(kEqual, &zero);
5546 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005547
Roland Levillain0d5a2812015-11-13 10:07:31 +00005548 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005549 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005550
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005551 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005552 case TypeCheckKind::kExactCheck: {
5553 if (cls.IsRegister()) {
5554 __ cmpl(out, cls.AsRegister<CpuRegister>());
5555 } else {
5556 DCHECK(cls.IsStackSlot()) << cls;
5557 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5558 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005559 if (zero.IsLinked()) {
5560 // Classes must be equal for the instanceof to succeed.
5561 __ j(kNotEqual, &zero);
5562 __ movl(out, Immediate(1));
5563 __ jmp(&done);
5564 } else {
5565 __ setcc(kEqual, out);
5566 // setcc only sets the low byte.
5567 __ andl(out, Immediate(1));
5568 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005569 break;
5570 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005571
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005572 case TypeCheckKind::kAbstractClassCheck: {
5573 // If the class is abstract, we eagerly fetch the super class of the
5574 // object to avoid doing a comparison we know will fail.
5575 NearLabel loop, success;
5576 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005577 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005578 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005579 __ testl(out, out);
5580 // If `out` is null, we use it for the result, and jump to `done`.
5581 __ j(kEqual, &done);
5582 if (cls.IsRegister()) {
5583 __ cmpl(out, cls.AsRegister<CpuRegister>());
5584 } else {
5585 DCHECK(cls.IsStackSlot()) << cls;
5586 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5587 }
5588 __ j(kNotEqual, &loop);
5589 __ movl(out, Immediate(1));
5590 if (zero.IsLinked()) {
5591 __ jmp(&done);
5592 }
5593 break;
5594 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005595
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005596 case TypeCheckKind::kClassHierarchyCheck: {
5597 // Walk over the class hierarchy to find a match.
5598 NearLabel loop, success;
5599 __ Bind(&loop);
5600 if (cls.IsRegister()) {
5601 __ cmpl(out, cls.AsRegister<CpuRegister>());
5602 } else {
5603 DCHECK(cls.IsStackSlot()) << cls;
5604 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5605 }
5606 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005607 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005608 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005609 __ testl(out, out);
5610 __ j(kNotEqual, &loop);
5611 // If `out` is null, we use it for the result, and jump to `done`.
5612 __ jmp(&done);
5613 __ Bind(&success);
5614 __ movl(out, Immediate(1));
5615 if (zero.IsLinked()) {
5616 __ jmp(&done);
5617 }
5618 break;
5619 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005620
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005621 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005622 // Do an exact check.
5623 NearLabel exact_check;
5624 if (cls.IsRegister()) {
5625 __ cmpl(out, cls.AsRegister<CpuRegister>());
5626 } else {
5627 DCHECK(cls.IsStackSlot()) << cls;
5628 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5629 }
5630 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005631 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005632 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005633 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005634 __ testl(out, out);
5635 // If `out` is null, we use it for the result, and jump to `done`.
5636 __ j(kEqual, &done);
5637 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5638 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005639 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005640 __ movl(out, Immediate(1));
5641 __ jmp(&done);
5642 break;
5643 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005644
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005645 case TypeCheckKind::kArrayCheck: {
5646 if (cls.IsRegister()) {
5647 __ cmpl(out, cls.AsRegister<CpuRegister>());
5648 } else {
5649 DCHECK(cls.IsStackSlot()) << cls;
5650 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5651 }
5652 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005653 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5654 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005655 codegen_->AddSlowPath(slow_path);
5656 __ j(kNotEqual, slow_path->GetEntryLabel());
5657 __ movl(out, Immediate(1));
5658 if (zero.IsLinked()) {
5659 __ jmp(&done);
5660 }
5661 break;
5662 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005663
Calin Juravle98893e12015-10-02 21:05:03 +01005664 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005665 case TypeCheckKind::kInterfaceCheck: {
5666 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005667 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005668 // cases.
5669 //
5670 // We cannot directly call the InstanceofNonTrivial runtime
5671 // entry point without resorting to a type checking slow path
5672 // here (i.e. by calling InvokeRuntime directly), as it would
5673 // require assigning fixed registers for the inputs of this
5674 // HInstanceOf instruction (following the runtime calling
5675 // convention), which might be cluttered by the potential first
5676 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005677 //
5678 // TODO: Introduce a new runtime entry point taking the object
5679 // to test (instead of its class) as argument, and let it deal
5680 // with the read barrier issues. This will let us refactor this
5681 // case of the `switch` code as it was previously (with a direct
5682 // call to the runtime not using a type checking slow path).
5683 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005684 DCHECK(locations->OnlyCallsOnSlowPath());
5685 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5686 /* is_fatal */ false);
5687 codegen_->AddSlowPath(slow_path);
5688 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005689 if (zero.IsLinked()) {
5690 __ jmp(&done);
5691 }
5692 break;
5693 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005694 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005695
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005696 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005697 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005698 __ xorl(out, out);
5699 }
5700
5701 if (done.IsLinked()) {
5702 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005703 }
5704
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005705 if (slow_path != nullptr) {
5706 __ Bind(slow_path->GetExitLabel());
5707 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005708}
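// For reference, when `obj` is statically known to be non-null and read
// barriers are off, the kExactCheck case above boils down to roughly
// (a sketch, concrete registers chosen by the allocator):
//   movl  out, [obj + class_offset]   ; load obj->klass_
//   cmpl  out, cls
//   sete  out                         ; 1 if the classes match
//   andl  out, 1                      ; setcc only writes the low byte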
5709
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005710void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005711 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5712 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005713 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5714 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005715 case TypeCheckKind::kExactCheck:
5716 case TypeCheckKind::kAbstractClassCheck:
5717 case TypeCheckKind::kClassHierarchyCheck:
5718 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005719 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5720 LocationSummary::kCallOnSlowPath :
5721 LocationSummary::kNoCall; // In fact, this calls on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005722 break;
5723 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005724 case TypeCheckKind::kUnresolvedCheck:
5725 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005726 call_kind = LocationSummary::kCallOnSlowPath;
5727 break;
5728 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005729 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5730 locations->SetInAt(0, Location::RequiresRegister());
5731 locations->SetInAt(1, Location::Any());
5732 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5733 locations->AddTemp(Location::RequiresRegister());
5734 // When read barriers are enabled, we need an additional temporary
5735 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005736 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005737 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005738 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005739}
5740
5741void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005742 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005743 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005744 Location obj_loc = locations->InAt(0);
5745 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005746 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005747 Location temp_loc = locations->GetTemp(0);
5748 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005749 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005750 locations->GetTemp(1) :
5751 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005752 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5753 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5754 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5755 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005756
Roland Levillain0d5a2812015-11-13 10:07:31 +00005757 bool is_type_check_slow_path_fatal =
5758 (type_check_kind == TypeCheckKind::kExactCheck ||
5759 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5760 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5761 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5762 !instruction->CanThrowIntoCatchBlock();
5763 SlowPathCode* type_check_slow_path =
5764 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5765 is_type_check_slow_path_fatal);
5766 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005767
Roland Levillain0d5a2812015-11-13 10:07:31 +00005768 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005769 case TypeCheckKind::kExactCheck:
5770 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005771 NearLabel done;
5772 // Avoid null check if we know obj is not null.
5773 if (instruction->MustDoNullCheck()) {
5774 __ testl(obj, obj);
5775 __ j(kEqual, &done);
5776 }
5777
5778 // /* HeapReference<Class> */ temp = obj->klass_
5779 GenerateReferenceLoadTwoRegisters(
5780 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5781
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005782 if (cls.IsRegister()) {
5783 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5784 } else {
5785 DCHECK(cls.IsStackSlot()) << cls;
5786 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5787 }
5788 // Jump to slow path for throwing the exception or doing a
5789 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005790 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005791 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005792 break;
5793 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005794
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005795 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005796 NearLabel done;
5797 // Avoid null check if we know obj is not null.
5798 if (instruction->MustDoNullCheck()) {
5799 __ testl(obj, obj);
5800 __ j(kEqual, &done);
5801 }
5802
5803 // /* HeapReference<Class> */ temp = obj->klass_
5804 GenerateReferenceLoadTwoRegisters(
5805 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5806
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005807 // If the class is abstract, we eagerly fetch the super class of the
5808 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005809 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005810 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005811 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005812 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005813
5814 // If the class reference currently in `temp` is not null, jump
5815 // to the `compare_classes` label to compare it with the checked
5816 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005817 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005818 __ j(kNotEqual, &compare_classes);
5819 // Otherwise, jump to the slow path to throw the exception.
5820 //
5821 // But before, move back the object's class into `temp` before
5822 // going into the slow path, as it has been overwritten in the
5823 // meantime.
5824 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005825 GenerateReferenceLoadTwoRegisters(
5826 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005827 __ jmp(type_check_slow_path->GetEntryLabel());
5828
5829 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005830 if (cls.IsRegister()) {
5831 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5832 } else {
5833 DCHECK(cls.IsStackSlot()) << cls;
5834 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5835 }
5836 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00005837 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005838 break;
5839 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005840
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005841 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005842 NearLabel done;
5843 // Avoid null check if we know obj is not null.
5844 if (instruction->MustDoNullCheck()) {
5845 __ testl(obj, obj);
5846 __ j(kEqual, &done);
5847 }
5848
5849 // /* HeapReference<Class> */ temp = obj->klass_
5850 GenerateReferenceLoadTwoRegisters(
5851 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5852
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005853 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005854 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005855 __ Bind(&loop);
5856 if (cls.IsRegister()) {
5857 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5858 } else {
5859 DCHECK(cls.IsStackSlot()) << cls;
5860 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5861 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005862 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005863
Roland Levillain0d5a2812015-11-13 10:07:31 +00005864 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005865 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005866
5867 // If the class reference currently in `temp` is not null, jump
5868 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005869 __ testl(temp, temp);
5870 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005871 // Otherwise, jump to the slow path to throw the exception.
5872 //
5873 // But before, move back the object's class into `temp` before
5874 // going into the slow path, as it has been overwritten in the
5875 // meantime.
5876 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005877 GenerateReferenceLoadTwoRegisters(
5878 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005879 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005880 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005881 break;
5882 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005883
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005884 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005885 // We cannot use a NearLabel here, as its range might be too
5886 // short in some cases when read barriers are enabled. This has
5887 // been observed for instance when the code emitted for this
5888 // case uses high x86-64 registers (R8-R15).
5889 Label done;
5890 // Avoid null check if we know obj is not null.
5891 if (instruction->MustDoNullCheck()) {
5892 __ testl(obj, obj);
5893 __ j(kEqual, &done);
5894 }
5895
5896 // /* HeapReference<Class> */ temp = obj->klass_
5897 GenerateReferenceLoadTwoRegisters(
5898 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5899
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005900 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005901 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005902 if (cls.IsRegister()) {
5903 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5904 } else {
5905 DCHECK(cls.IsStackSlot()) << cls;
5906 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5907 }
5908 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005909
5910 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005911 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005912 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005913
5914 // If the component type is not null (i.e. the object is indeed
5915 // an array), jump to label `check_non_primitive_component_type`
5916 // to further check that this component type is not a primitive
5917 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005918 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005919 __ j(kNotEqual, &check_non_primitive_component_type);
5920 // Otherwise, jump to the slow path to throw the exception.
5921 //
5922 // But before, move back the object's class into `temp` before
5923 // going into the slow path, as it has been overwritten in the
5924 // meantime.
5925 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005926 GenerateReferenceLoadTwoRegisters(
5927 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005928 __ jmp(type_check_slow_path->GetEntryLabel());
5929
5930 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005931 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005932 __ j(kEqual, &done);
5933 // Same comment as above regarding `temp` and the slow path.
5934 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005935 GenerateReferenceLoadTwoRegisters(
5936 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005937 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005938 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005939 break;
5940 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005941
Calin Juravle98893e12015-10-02 21:05:03 +01005942 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005943 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00005944 NearLabel done;
5945 // Avoid null check if we know obj is not null.
5946 if (instruction->MustDoNullCheck()) {
5947 __ testl(obj, obj);
5948 __ j(kEqual, &done);
5949 }
5950
5951 // /* HeapReference<Class> */ temp = obj->klass_
5952 GenerateReferenceLoadTwoRegisters(
5953 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5954
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005955 // We always go into the type check slow path for the unresolved
5956 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005957 //
5958 // We cannot directly call the CheckCast runtime entry point
5959 // without resorting to a type checking slow path here (i.e. by
5960 // calling InvokeRuntime directly), as it would require
5961 // assigning fixed registers for the inputs of this HCheckCast
5962 // instruction (following the runtime calling convention), which
5963 // might be cluttered by the potential first read barrier
5964 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005965 //
5966 // TODO: Introduce a new runtime entry point taking the object
5967 // to test (instead of its class) as argument, and let it deal
5968 // with the read barrier issues. This will let us refactor this
5969 // case of the `switch` code as it was previously (with a direct
5970 // call to the runtime not using a type checking slow path).
5971 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005972 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005973 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005974 break;
5975 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005976
Roland Levillain0d5a2812015-11-13 10:07:31 +00005977 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005978}
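// Every case above follows the same skeleton: optionally null-check `obj`
// (a null reference trivially passes the cast), load obj->klass_ into `temp`,
// then either compare against `cls` inline or fall back to the shared
// TypeCheckSlowPathX86_64, which is only marked fatal for the simple check
// kinds when the instruction cannot throw into a catch block.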
5979
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005980void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
5981 LocationSummary* locations =
5982 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5983 InvokeRuntimeCallingConvention calling_convention;
5984 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5985}
5986
5987void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005988 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
5989 : QUICK_ENTRY_POINT(pUnlockObject),
5990 instruction,
5991 instruction->GetDexPc(),
5992 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005993 if (instruction->IsEnter()) {
5994 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5995 } else {
5996 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5997 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005998}
5999
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006000void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6001void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6002void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6003
6004void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6005 LocationSummary* locations =
6006 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6007 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6008 || instruction->GetResultType() == Primitive::kPrimLong);
6009 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006010 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006011 locations->SetOut(Location::SameAsFirstInput());
6012}
6013
6014void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6015 HandleBitwiseOperation(instruction);
6016}
6017
6018void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6019 HandleBitwiseOperation(instruction);
6020}
6021
6022void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6023 HandleBitwiseOperation(instruction);
6024}
6025
6026void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6027 LocationSummary* locations = instruction->GetLocations();
6028 Location first = locations->InAt(0);
6029 Location second = locations->InAt(1);
6030 DCHECK(first.Equals(locations->Out()));
6031
6032 if (instruction->GetResultType() == Primitive::kPrimInt) {
6033 if (second.IsRegister()) {
6034 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006035 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006036 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006037 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006038 } else {
6039 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006040 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006041 }
6042 } else if (second.IsConstant()) {
6043 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6044 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006045 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006046 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006047 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006048 } else {
6049 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006050 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006051 }
6052 } else {
6053 Address address(CpuRegister(RSP), second.GetStackIndex());
6054 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006055 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006056 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006057 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006058 } else {
6059 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006060 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006061 }
6062 }
6063 } else {
6064 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006065 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6066 bool second_is_constant = false;
6067 int64_t value = 0;
6068 if (second.IsConstant()) {
6069 second_is_constant = true;
6070 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006071 }
Mark Mendell40741f32015-04-20 22:10:34 -04006072 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006073
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006074 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006075 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006076 if (is_int32_value) {
6077 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6078 } else {
6079 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6080 }
6081 } else if (second.IsDoubleStackSlot()) {
6082 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006083 } else {
6084 __ andq(first_reg, second.AsRegister<CpuRegister>());
6085 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006086 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006087 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006088 if (is_int32_value) {
6089 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6090 } else {
6091 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6092 }
6093 } else if (second.IsDoubleStackSlot()) {
6094 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006095 } else {
6096 __ orq(first_reg, second.AsRegister<CpuRegister>());
6097 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006098 } else {
6099 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006100 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006101 if (is_int32_value) {
6102 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6103 } else {
6104 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6105 }
6106 } else if (second.IsDoubleStackSlot()) {
6107 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006108 } else {
6109 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6110 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006111 }
6112 }
6113}
6114
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006115void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6116 Location out,
6117 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006118 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006119 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6120 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006121 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006122 if (kUseBakerReadBarrier) {
6123 // Load with fast path based Baker's read barrier.
6124 // /* HeapReference<Object> */ out = *(out + offset)
6125 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006126 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006127 } else {
6128 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006129 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006130 // in the following move operation, as we will need it for the
6131 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006132 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006133 // /* HeapReference<Object> */ out = *(out + offset)
6134 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006135 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006136 }
6137 } else {
6138 // Plain load with no read barrier.
6139 // /* HeapReference<Object> */ out = *(out + offset)
6140 __ movl(out_reg, Address(out_reg, offset));
6141 __ MaybeUnpoisonHeapReference(out_reg);
6142 }
6143}
6144
6145void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6146 Location out,
6147 Location obj,
6148 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006149 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006150 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6151 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6152 if (kEmitCompilerReadBarrier) {
6153 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006154 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006155 // Load with fast path based Baker's read barrier.
6156 // /* HeapReference<Object> */ out = *(obj + offset)
6157 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006158 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006159 } else {
6160 // Load with slow path based read barrier.
6161 // /* HeapReference<Object> */ out = *(obj + offset)
6162 __ movl(out_reg, Address(obj_reg, offset));
6163 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6164 }
6165 } else {
6166 // Plain load with no read barrier.
6167 // /* HeapReference<Object> */ out = *(obj + offset)
6168 __ movl(out_reg, Address(obj_reg, offset));
6169 __ MaybeUnpoisonHeapReference(out_reg);
6170 }
6171}
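// The two helpers above differ only in where they read from: the
// "OneRegister" variant dereferences `out` itself (clobbering it), so on the
// non-Baker read barrier path it must first stash the old value in
// `maybe_temp`; the "TwoRegisters" variant reads from a separate `obj`
// register and therefore only needs `maybe_temp` for the Baker fast path.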
6172
6173void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6174 Location root,
6175 CpuRegister obj,
6176 uint32_t offset) {
6177 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6178 if (kEmitCompilerReadBarrier) {
6179 if (kUseBakerReadBarrier) {
6180 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6181 // Baker's read barriers are used:
6182 //
6183 // root = obj.field;
6184 // if (Thread::Current()->GetIsGcMarking()) {
6185 // root = ReadBarrier::Mark(root)
6186 // }
6187
6188 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6189 __ movl(root_reg, Address(obj, offset));
6190 static_assert(
6191 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6192 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6193 "have different sizes.");
6194 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6195 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6196 "have different sizes.");
6197
6198 // Slow path used to mark the GC root `root`.
6199 SlowPathCode* slow_path =
6200 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
6201 codegen_->AddSlowPath(slow_path);
6202
6203 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6204 /* no_rip */ true),
6205 Immediate(0));
6206 __ j(kNotEqual, slow_path->GetEntryLabel());
6207 __ Bind(slow_path->GetExitLabel());
6208 } else {
6209 // GC root loaded through a slow path for read barriers other
6210 // than Baker's.
6211 // /* GcRoot<mirror::Object>* */ root = obj + offset
6212 __ leaq(root_reg, Address(obj, offset));
6213 // /* mirror::Object* */ root = root->Read()
6214 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6215 }
6216 } else {
6217 // Plain GC root load with no read barrier.
6218 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6219 __ movl(root_reg, Address(obj, offset));
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006220 // Note that GC roots are not affected by heap poisoning, thus we
6221 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006222 }
6223}
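// In the Baker case above, the thread-local is_gc_marking flag is tested
// with a single gs-relative cmpl, so the common not-marking case falls
// straight through and only a marking GC pays for the out-of-line
// ReadBarrierMarkSlowPathX86_64 call.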
6224
6225void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6226 Location ref,
6227 CpuRegister obj,
6228 uint32_t offset,
6229 Location temp,
6230 bool needs_null_check) {
6231 DCHECK(kEmitCompilerReadBarrier);
6232 DCHECK(kUseBakerReadBarrier);
6233
6234 // /* HeapReference<Object> */ ref = *(obj + offset)
6235 Address src(obj, offset);
6236 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6237}
6238
6239void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6240 Location ref,
6241 CpuRegister obj,
6242 uint32_t data_offset,
6243 Location index,
6244 Location temp,
6245 bool needs_null_check) {
6246 DCHECK(kEmitCompilerReadBarrier);
6247 DCHECK(kUseBakerReadBarrier);
6248
6249 // /* HeapReference<Object> */ ref =
6250 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6251 Address src = index.IsConstant() ?
6252 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6253 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6254 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6255}
6256
6257void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6258 Location ref,
6259 CpuRegister obj,
6260 const Address& src,
6261 Location temp,
6262 bool needs_null_check) {
6263 DCHECK(kEmitCompilerReadBarrier);
6264 DCHECK(kUseBakerReadBarrier);
6265
6266 // In slow path based read barriers, the read barrier call is
6267 // inserted after the original load. However, in fast path based
6268 // Baker's read barriers, we need to perform the load of
6269 // mirror::Object::monitor_ *before* the original reference load.
6270 // This load-load ordering is required by the read barrier.
6271 // The fast path/slow path (for Baker's algorithm) should look like:
6272 //
6273 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6274 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6275 // HeapReference<Object> ref = *src; // Original reference load.
6276 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6277 // if (is_gray) {
6278 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6279 // }
6280 //
6281 // Note: the original implementation in ReadBarrier::Barrier is
6282 // slightly more complex as:
6283 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006284 // the high bits of rb_state, which are expected to be all zeroes
6285 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6286 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006287 // - it performs additional checks that we do not do here for
6288 // performance reasons.
6289
6290 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6291 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6292 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6293
6294 // /* int32_t */ monitor = obj->monitor_
6295 __ movl(temp_reg, Address(obj, monitor_offset));
6296 if (needs_null_check) {
6297 MaybeRecordImplicitNullCheck(instruction);
6298 }
6299 // /* LockWord */ lock_word = LockWord(monitor)
6300 static_assert(sizeof(LockWord) == sizeof(int32_t),
6301 "art::LockWord and int32_t have different sizes.");
6302 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6303 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6304 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6305 static_assert(
6306 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6307 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6308
6309 // Load fence to prevent load-load reordering.
6310 // Note that this is a no-op, thanks to the x86-64 memory model.
6311 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6312
6313 // The actual reference load.
6314 // /* HeapReference<Object> */ ref = *src
6315 __ movl(ref_reg, src);
6316
6317 // Object* ref = ref_addr->AsMirrorPtr()
6318 __ MaybeUnpoisonHeapReference(ref_reg);
6319
6320 // Slow path used to mark the object `ref` when it is gray.
6321 SlowPathCode* slow_path =
6322 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6323 AddSlowPath(slow_path);
6324
6325 // if (rb_state == ReadBarrier::gray_ptr_)
6326 // ref = ReadBarrier::Mark(ref);
6327 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6328 __ j(kEqual, slow_path->GetEntryLabel());
6329 __ Bind(slow_path->GetExitLabel());
6330}
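// GenerateFieldLoadWithBakerReadBarrier and GenerateArrayLoadWithBakerReadBarrier
// above are thin wrappers around GenerateReferenceLoadWithBakerReadBarrier;
// they only differ in how the source Address is formed (a plain field offset
// vs. data_offset + index * 4, matching the 4-byte HeapReference layout).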
6331
6332void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6333 Location out,
6334 Location ref,
6335 Location obj,
6336 uint32_t offset,
6337 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006338 DCHECK(kEmitCompilerReadBarrier);
6339
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006340 // Insert a slow path based read barrier *after* the reference load.
6341 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006342 // If heap poisoning is enabled, the unpoisoning of the loaded
6343 // reference will be carried out by the runtime within the slow
6344 // path.
6345 //
6346 // Note that `ref` currently does not get unpoisoned (when heap
6347 // poisoning is enabled), which is alright as the `ref` argument is
6348 // not used by the artReadBarrierSlow entry point.
6349 //
6350 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6351 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6352 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6353 AddSlowPath(slow_path);
6354
Roland Levillain0d5a2812015-11-13 10:07:31 +00006355 __ jmp(slow_path->GetEntryLabel());
6356 __ Bind(slow_path->GetExitLabel());
6357}
6358
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006359void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6360 Location out,
6361 Location ref,
6362 Location obj,
6363 uint32_t offset,
6364 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006365 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006366 // Baker's read barriers shall be handled by the fast path
6367 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6368 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006369 // If heap poisoning is enabled, unpoisoning will be taken care of
6370 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006371 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006372 } else if (kPoisonHeapReferences) {
6373 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6374 }
6375}
6376
void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Packed switch: small switches get cascaded compare/jumps, larger ones a
// RIP-relative jump table in the constant area.
void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

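// Note on the two temporaries requested above (an explanatory comment, not a
// change in behavior): the jump-table path below may need one register for the
// bias-adjusted switch value and loaded table offset, and another for the
// RIP-relative address of the jump table itself. The small compare/jump
// cascade leaves both temporaries unused.
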
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index + 1]; the checks
      // above guarantee that this means value == case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There is an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}

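// Worked example for the compare/jump cascade (illustrative only; labels and
// the register name `value` stand in for whatever the allocator picked). For a
// packed switch with lower_bound = 3 and cases {3, 4, 5}, the inline path
// above emits approximately:
//
//   cmpl value, 3
//   jl   default        // below the range
//   je   case_3         // first entry, handled as the special case
//   cmpl value, 5
//   jl   case_4         // 3 < value < 5  =>  value == 4
//   je   case_5
//   jmp  default        // omitted when default is the fallthrough block
//
// Switches with more than kPackedSwitchJumpTableThreshold entries use the
// RIP-relative jump table path instead.
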
void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
  if (value == 0) {
    __ xorl(dest, dest);
  } else {
    __ movl(dest, Immediate(value));
  }
}

void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
  if (value == 0) {
    // Clears upper bits too.
    __ xorl(dest, dest);
  } else if (value > 0 && IsInt<32>(value)) {
    // We can use a 32 bit move, as it will zero-extend and is one byte shorter.
    __ movl(dest, Immediate(static_cast<int32_t>(value)));
  } else {
    __ movq(dest, Immediate(value));
  }
}

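// Encoding rationale for the three cases above (sizes are approximate and
// assume a register that needs no REX prefix): xorl reg, reg is 2 bytes and
// clears the upper 32 bits as well; movl reg, imm32 is 5 bytes and
// zero-extends, which is why it is only used for non-negative values (a
// negative value would need sign extension and falls through to movq); movq
// with a full 64-bit immediate is the 10-byte fallback.
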
void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
  if (value == 0) {
    __ xorps(dest, dest);
  } else {
    __ movss(dest, LiteralInt32Address(value));
  }
}

void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
  if (value == 0) {
    __ xorpd(dest, dest);
  } else {
    __ movsd(dest, LiteralInt64Address(value));
  }
}

void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
  Load32BitValue(dest, bit_cast<int32_t, float>(value));
}

void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
  Load64BitValue(dest, bit_cast<int64_t, double>(value));
}

void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
  if (value == 0) {
    __ testl(dest, dest);
  } else {
    __ cmpl(dest, Immediate(value));
  }
}

void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
  if (IsInt<32>(value)) {
    if (value == 0) {
      __ testq(dest, dest);
    } else {
      __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
    }
  } else {
    // The value does not fit in a 32-bit immediate, so compare against it in the constant area.
    __ cmpq(dest, LiteralInt64Address(value));
  }
}

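// Using test instead of cmp against zero is a standard x86 idiom: testl/testq
// of a register with itself sets ZF and SF exactly as a compare with zero
// would, but needs no immediate operand, so the encoding is shorter.
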
void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
  DCHECK(dest.IsDoubleStackSlot());
  if (IsInt<32>(value)) {
    // Can move directly as an int32 constant.
    __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
            Immediate(static_cast<int32_t>(value)));
  } else {
    Load64BitValue(CpuRegister(TMP), value);
    __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
  }
}

/**
 * Class to handle late fixup of offsets into constant area.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};

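// Worked example of the fixup arithmetic above (illustrative numbers): if the
// constant area starts at code offset 0x200, the literal sits 0x10 bytes into
// it, and the RIP-relative instruction ends at pos = 0x80, then
// constant_offset = 0x210 and relative_position = 0x210 - 0x80 = 0x190. That
// value overwrites the 4 displacement bytes ending at pos, which is exactly
// what RIP-relative addressing expects, since at run time RIP already points
// at the next instruction.
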
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the target block offsets.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};

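// How the pieces fit together (a sketch; register and label names are
// illustrative): the dispatch emitted in VisitPackedSwitch and the table built
// here cooperate as
//
//   leaq   base, [rip + jump_table]          // displacement patched by RIPFixup
//   movsxd temp, dword ptr [base + value*4]  // entry = target - table start
//   addq   temp, base
//   jmp    temp
//
// so each 32-bit entry only needs to store the signed distance from the start
// of the table to its target block.
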
void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables.
    for (auto jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}

Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
  return Address::RIP(fixup);
}

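// Typical use of these helpers in this file (a sketch, not a prescribed API):
// a floating-point constant can be materialized with something like
//   __ movsd(xmm_dest, LiteralDoubleAddress(3.14));
// The returned Address is RIP-relative with a placeholder displacement; the
// RIPFixup recorded alongside it rewrites that displacement once the constant
// area's final position is known during Finalize().
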
// TODO: trg as memory.
void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
  if (trg.Equals(return_loc)) {
    return;
  }

  // Let the parallel move resolver take care of all of this.
  HParallelMove parallel_move(GetGraph()->GetArena());
  parallel_move.AddMove(return_loc, trg, type, nullptr);
  GetMoveResolver()->EmitNativeCode(&parallel_move);
}

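// Background note (based on the calling convention visitor used above): core
// values come back in RAX and floating-point values in XMM0, so when the
// register allocator already placed `trg` there this helper emits nothing,
// and otherwise a single parallel move suffices.
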
Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
  // Create a fixup, used both to build the jump table and to address it RIP-relatively.
  JumpTableRIPFixup* table_fixup =
      new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);

  // Remember it so Finalize() can populate the jump table later.
  fixups_to_jump_tables_.push_back(table_fixup);
  return Address::RIP(table_fixup);
}

void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // The value does not fit in a sign-extended 32-bit immediate. Store it in two 32-bit halves.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}

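// Example with illustrative numbers: storing v = 0x100000001 cannot use a
// single movq with an immediate (the value does not fit in a sign-extended
// imm32), so the code above emits
//   movl [addr_low],  0x00000001   // may fault: implicit null check recorded here
//   movl [addr_high], 0x00000001
// The null check is only recorded on the first store, since a null base would
// fault there before the second store executes.
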
#undef __

}  // namespace x86_64
}  // namespace art