/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86.h"

#include "art_method-inl.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_x86.h"
#include "jit/profiling_info.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/managed_register_x86.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = EAX;
static constexpr Register kCoreCalleeSaves[] = { EBP, ESI, EDI };

static constexpr int kC2ConditionMask = 0x400;
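// Note: 0x400 is bit 10 of the x87 FPU status word, i.e. the C2 condition flag, which x87
// compare instructions set when the operands are unordered (at least one of them is a NaN).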

static constexpr int kFakeReturnRegister = Register(8);
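// Register index 8 is not a real x86-32 general-purpose register (only indices 0-7 exist);
// it stands in for the return-address slot when the core spill mask is built (see the
// CodeGeneratorX86 constructor below).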

static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);
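// These are the canonical IEEE-754 quiet-NaN bit patterns for double and float.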

static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
  // that the kPrimNot result register is the same as the first argument register.
  return caller_saves;
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, x).Int32Value()
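// QUICK_ENTRY_POINT(x) expands to the Thread-relative byte offset of the quick entrypoint `x`;
// runtime calls are emitted fs:-relative through it (see GenerateInvokeRuntime below).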

class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "NullCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};

class DivZeroCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "DivZeroCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
};

class DivRemMinusOneSlowPathX86 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86(HInstruction* instruction, Register reg, bool is_div)
      : SlowPathCode(instruction), reg_(reg), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    __ Bind(GetEntryLabel());
    if (is_div_) {
      __ negl(reg_);
    } else {
      __ movl(reg_, Immediate(0));
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86"; }

 private:
  Register reg_;
  bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86);
};

class BoundsCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86(HBoundsCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }

    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<Register>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<Register>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        __ shrl(length_loc.AsRegister<Register>(), Immediate(1));
      }
    }
    x86_codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};

class SuspendCheckSlowPathX86 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves full width XMM for SIMD.
    x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores full width XMM for SIMD.
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathX86"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};

class LoadStringSlowPathX86 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86(HLoadString* instruction): SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));
    x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    RestoreLiveRegisters(codegen, locations);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
};

class LoadClassSlowPathX86 : public SlowPathCode {
 public:
  LoadClassSlowPathX86(HLoadClass* cls, HInstruction* at)
      : SlowPathCode(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ movl(calling_convention.GetRegisterAt(0), Immediate(type_index.index_));
      x86_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), source);
    }
    if (must_do_clinit) {
      x86_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
    }
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathX86"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
};

class TypeCheckSlowPathX86 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<Register>());
    }

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(locations->InAt(0),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   DataType::Type::kReference,
                                   locations->InAt(1),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                                   DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_codegen->InvokeRuntime(kQuickCheckInstanceOf,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
      }
      RestoreLiveRegisters(codegen, locations);

      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathX86"; }
  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};

class DeoptimizationSlowPathX86 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->Load32BitValue(
        calling_convention.GetRegisterAt(0),
        static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    x86_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86);
};

class ArraySetSlowPathX86 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86(HInstruction* instruction,
                             Location ref,
                             bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathX86(HInstruction* instruction,
                                           Location ref,
                                           Register obj,
                                           const Address& field_addr,
                                           bool unpoison_ref_before_marking,
                                           Register temp)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkAndUpdateFieldSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp_, ref_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but that's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save EAX beforehand, and move the
    // expected value (stored in `temp_`) into EAX.
    __ pushl(EAX);
    __ movl(EAX, temp_);

    // Convenience aliases.
    Register base = obj_;
    Register expected = EAX;
    Register value = ref_reg;

    bool base_equals_value = (base == value);
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value` to a temporary register. This way, poisoning
        // `value` won't invalidate `base`.
        value = temp_;
        __ movl(value, base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (EAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value, expected);
      DCHECK_NE(base, expected);

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(value);
    }

    __ LockCmpxchgl(field_addr_, value);

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(value);
      }
      // No need to unpoison `expected` (EAX), as it is overwritten below.
    }

    // Restore EAX.
    __ popl(EAX);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86(HInstruction* instruction,
                                         Location out,
                                         Location ref,
                                         Location obj,
                                         uint32_t offset,
                                         Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86::X86Assembler::shll and
          // art::x86::X86Assembler::AddImmediate below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ movl(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(index_reg, Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(index_reg, Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(calling_convention.GetRegisterAt(2), Immediate(offset_));
    }
    x86_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForHeapReferenceSlowPathX86"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(GetAssembler())->  // NOLINT

inline Condition X86Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition and FP condition to x86 name.
inline Condition X86UnsignedOrFPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    // Signed to unsigned, and FP to x86 name.
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    // Unsigned remain unchanged.
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100964void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100965 stream << Register(reg);
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100966}
967
968void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100969 stream << XmmRegister(reg);
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100970}
971
Vladimir Markoa0431112018-06-25 09:32:54 +0100972const X86InstructionSetFeatures& CodeGeneratorX86::GetInstructionSetFeatures() const {
973 return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86InstructionSetFeatures();
974}
975
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100976size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
977 __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
978 return kX86WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100979}
980
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100981size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
982 __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
983 return kX86WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100984}
985
Mark Mendell7c8d0092015-01-26 11:21:33 -0500986size_t CodeGeneratorX86::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -0700987 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -0700988 __ movups(Address(ESP, stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -0700989 } else {
990 __ movsd(Address(ESP, stack_index), XmmRegister(reg_id));
991 }
Artem Serov6a0b6572019-07-26 20:38:37 +0100992 return GetSlowPathFPWidth();
Mark Mendell7c8d0092015-01-26 11:21:33 -0500993}
994
995size_t CodeGeneratorX86::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -0700996 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -0700997 __ movups(XmmRegister(reg_id), Address(ESP, stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -0700998 } else {
999 __ movsd(XmmRegister(reg_id), Address(ESP, stack_index));
1000 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001001 return GetSlowPathFPWidth();
Mark Mendell7c8d0092015-01-26 11:21:33 -05001002}
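// A worked example of the spill helpers above (a sketch, assuming a slow path that
// spills XMM0 at stack offset 16):
//   movsd  [esp + 16], xmm0   // scalar-only graph: 8 bytes cover a double
//   movups [esp + 16], xmm0   // graph with SIMD: the full 128-bit register is saved
// GetSlowPathFPWidth() is expected to report the matching width so the slow path
// frame reserves enough space in either case.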
1003
Calin Juravle175dc732015-08-25 15:42:32 +01001004void CodeGeneratorX86::InvokeRuntime(QuickEntrypointEnum entrypoint,
1005 HInstruction* instruction,
1006 uint32_t dex_pc,
1007 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001008 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001009 GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(entrypoint).Int32Value());
1010 if (EntrypointRequiresStackMap(entrypoint)) {
1011 RecordPcInfo(instruction, dex_pc, slow_path);
1012 }
Alexandre Rames8158f282015-08-07 10:26:17 +01001013}
1014
Roland Levillaindec8f632016-07-22 17:10:06 +01001015void CodeGeneratorX86::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1016 HInstruction* instruction,
1017 SlowPathCode* slow_path) {
1018 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001019 GenerateInvokeRuntime(entry_point_offset);
1020}
1021
1022void CodeGeneratorX86::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001023 __ fs()->call(Address::Absolute(entry_point_offset));
1024}
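// On x86 the Thread's entrypoint table is reached through the %fs segment, so a
// runtime call boils down to a single indirect call (a sketch; the offset depends on
// the entrypoint):
//   call fs:[<entry_point_offset>]
// When the entrypoint requires a stack map, InvokeRuntime() records the PC right
// after this call.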
1025
Mark Mendellfb8d2792015-03-31 22:16:59 -04001026CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001027 const CompilerOptions& compiler_options,
1028 OptimizingCompilerStats* stats)
Mark Mendell5f874182015-03-04 15:42:45 -05001029 : CodeGenerator(graph,
1030 kNumberOfCpuRegisters,
1031 kNumberOfXmmRegisters,
1032 kNumberOfRegisterPairs,
1033 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1034 arraysize(kCoreCalleeSaves))
1035 | (1 << kFakeReturnRegister),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001036 0,
1037 compiler_options,
1038 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001039 block_labels_(nullptr),
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001040 location_builder_(graph, this),
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001041 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001042 move_resolver_(graph->GetAllocator(), this),
1043 assembler_(graph->GetAllocator()),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001044 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1045 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1046 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1047 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001048 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001049 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko2d06e022019-07-08 15:45:19 +01001050 boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001051 jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1052 jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko93205e32016-04-13 11:59:46 +01001053 constant_area_start_(-1),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001054 fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001055 method_address_offset_(std::less<uint32_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001056 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001057 // Use a fake return address register to mimic Quick.
1058 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001059}
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001060
David Brazdil58282f42016-01-14 12:45:10 +00001061void CodeGeneratorX86::SetupBlockedRegisters() const {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001062 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001063 blocked_core_registers_[ESP] = true;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001064}
1065
Nicolas Geoffray4a34a422014-04-03 10:38:37 +01001066InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001067 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray4a34a422014-04-03 10:38:37 +01001068 assembler_(codegen->GetAssembler()),
1069 codegen_(codegen) {}
1070
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001071static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001072 return dwarf::Reg::X86Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001073}
1074
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001075void CodeGeneratorX86::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001076 __ cfi().SetCurrentCFAOffset(kX86WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001077 __ Bind(&frame_entry_label_);
Roland Levillain199f3362014-11-27 17:15:16 +00001078 bool skip_overflow_check =
1079 IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001080 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Calin Juravle93edf732015-01-20 20:14:07 +00001081
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001082 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001083 NearLabel overflow;
1084 __ cmpw(Address(kMethodRegisterArgument,
1085 ArtMethod::HotnessCountOffset().Int32Value()),
1086 Immediate(ArtMethod::MaxCounter()));
1087 __ j(kEqual, &overflow);
1088 __ addw(Address(kMethodRegisterArgument,
1089 ArtMethod::HotnessCountOffset().Int32Value()),
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001090 Immediate(1));
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001091 __ Bind(&overflow);
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001092 }
1093
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001094 if (!skip_overflow_check) {
Vladimir Marko33bff252017-11-01 14:35:42 +00001095 size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86);
1096 __ testl(EAX, Address(ESP, -static_cast<int32_t>(reserved_bytes)));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001097 RecordPcInfo(nullptr, 0);
Nicolas Geoffray397f2e42014-07-23 12:57:19 +01001098 }
1099
Mark Mendell5f874182015-03-04 15:42:45 -05001100 if (HasEmptyFrame()) {
1101 return;
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001102 }
Mark Mendell5f874182015-03-04 15:42:45 -05001103
1104 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
1105 Register reg = kCoreCalleeSaves[i];
1106 if (allocated_registers_.ContainsCoreRegister(reg)) {
1107 __ pushl(reg);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001108 __ cfi().AdjustCFAOffset(kX86WordSize);
1109 __ cfi().RelOffset(DWARFReg(reg), 0);
Mark Mendell5f874182015-03-04 15:42:45 -05001110 }
1111 }
1112
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001113 int adjust = GetFrameSize() - FrameEntrySpillSize();
1114 __ subl(ESP, Immediate(adjust));
1115 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001116 // Save the current method if we need it. Note that we do not
1117 // do this in HCurrentMethod, as the instruction might have been removed
1118 // in the SSA graph.
1119 if (RequiresCurrentMethod()) {
1120 __ movl(Address(ESP, kCurrentMethodStackOffset), kMethodRegisterArgument);
1121 }
Nicolas Geoffrayf7893532017-06-15 12:34:36 +01001122
1123 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1124 // Initialize should_deoptimize flag to 0.
1125 __ movl(Address(ESP, GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
1126 }
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001127}
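// Rough shape of the prologue emitted above for a hypothetical method that allocates
// ESI and needs a non-empty frame (hotness counting disabled; constants depend on the
// frame size and register allocation):
//   testl eax, [esp - <reserved_bytes>]   // implicit stack-overflow probe
//   pushl esi                             // callee-save spill
//   subl  esp, <frame_size - spills>
//   movl  [esp], eax                      // store the ArtMethod* (current method)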
1128
1129void CodeGeneratorX86::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001130 __ cfi().RememberState();
1131 if (!HasEmptyFrame()) {
1132 int adjust = GetFrameSize() - FrameEntrySpillSize();
1133 __ addl(ESP, Immediate(adjust));
1134 __ cfi().AdjustCFAOffset(-adjust);
Mark Mendell5f874182015-03-04 15:42:45 -05001135
David Srbeckyc34dc932015-04-12 09:27:43 +01001136 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1137 Register reg = kCoreCalleeSaves[i];
1138 if (allocated_registers_.ContainsCoreRegister(reg)) {
1139 __ popl(reg);
1140 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86WordSize));
1141 __ cfi().Restore(DWARFReg(reg));
1142 }
Mark Mendell5f874182015-03-04 15:42:45 -05001143 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001144 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001145 __ ret();
1146 __ cfi().RestoreState();
1147 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001148}
1149
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001150void CodeGeneratorX86::Bind(HBasicBlock* block) {
1151 __ Bind(GetLabelOf(block));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001152}
1153
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001154Location InvokeDexCallingConventionVisitorX86::GetReturnLocation(DataType::Type type) const {
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001155 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001156 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001157 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001158 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001159 case DataType::Type::kInt8:
1160 case DataType::Type::kUint16:
1161 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -08001162 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001163 case DataType::Type::kInt32:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001164 return Location::RegisterLocation(EAX);
1165
Aart Bik66c158e2018-01-31 12:55:04 -08001166 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001167 case DataType::Type::kInt64:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001168 return Location::RegisterPairLocation(EAX, EDX);
1169
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001170 case DataType::Type::kVoid:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001171 return Location::NoLocation();
1172
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001173 case DataType::Type::kFloat64:
1174 case DataType::Type::kFloat32:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001175 return Location::FpuRegisterLocation(XMM0);
1176 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01001177
1178 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001179}
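// For example, per the mapping above, a method returning a long gets its result in
// the EAX:EDX pair, float and double results come back in XMM0, and int-like values
// (including references) come back in EAX.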
1180
1181Location InvokeDexCallingConventionVisitorX86::GetMethodLocation() const {
1182 return Location::RegisterLocation(kMethodRegisterArgument);
1183}
1184
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001185Location InvokeDexCallingConventionVisitorX86::GetNextLocation(DataType::Type type) {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001186 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001187 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001188 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001189 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001190 case DataType::Type::kInt8:
1191 case DataType::Type::kUint16:
1192 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001193 case DataType::Type::kInt32: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001194 uint32_t index = gp_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001195 stack_index_++;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001196 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001197 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001198 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001199 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001200 }
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001201 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001202
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001203 case DataType::Type::kInt64: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001204 uint32_t index = gp_index_;
1205 gp_index_ += 2;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001206 stack_index_ += 2;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001207 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001208 X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
1209 calling_convention.GetRegisterPairAt(index));
1210 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001211 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001212 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
1213 }
1214 }
1215
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001216 case DataType::Type::kFloat32: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001217 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001218 stack_index_++;
1219 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1220 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1221 } else {
1222 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
1223 }
1224 }
1225
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001226 case DataType::Type::kFloat64: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001227 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001228 stack_index_ += 2;
1229 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1230 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1231 } else {
1232 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001233 }
1234 }
1235
Aart Bik66c158e2018-01-31 12:55:04 -08001236 case DataType::Type::kUint32:
1237 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001238 case DataType::Type::kVoid:
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001239 LOG(FATAL) << "Unexpected parameter type " << type;
Elliott Hughesc1896c92018-11-29 11:33:18 -08001240 UNREACHABLE();
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001241 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00001242 return Location::NoLocation();
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001243}
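// Worked example of GetNextLocation() above for a (int, long, float) signature: the
// int takes the next free core argument register (or a stack slot once registers run
// out), the long takes a register pair only if both halves fit (otherwise the whole
// value goes to the stack), and the float takes the next XMM argument register.
// Note that stack_index_ advances for every argument (one slot for 32-bit values,
// two for 64-bit ones), so stack offsets stay consistent even when earlier arguments
// were assigned registers.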
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001244
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001245void CodeGeneratorX86::Move32(Location destination, Location source) {
1246 if (source.Equals(destination)) {
1247 return;
1248 }
1249 if (destination.IsRegister()) {
1250 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001251 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001252 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001253 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001254 } else {
1255 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001256 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001257 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001258 } else if (destination.IsFpuRegister()) {
1259 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001260 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001261 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001262 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001263 } else {
1264 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001265 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001266 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001267 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001268 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001269 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001270 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001271 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001272 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05001273 } else if (source.IsConstant()) {
1274 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001275 int32_t value = GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05001276 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001277 } else {
1278 DCHECK(source.IsStackSlot());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001279 __ pushl(Address(ESP, source.GetStackIndex()));
1280 __ popl(Address(ESP, destination.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001281 }
1282 }
1283}
1284
1285void CodeGeneratorX86::Move64(Location destination, Location source) {
1286 if (source.Equals(destination)) {
1287 return;
1288 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001289 if (destination.IsRegisterPair()) {
1290 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001291 EmitParallelMoves(
1292 Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
1293 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001294 DataType::Type::kInt32,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001295 Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001296 Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001297 DataType::Type::kInt32);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001298 } else if (source.IsFpuRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001299 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
1300 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
1301 __ psrlq(src_reg, Immediate(32));
1302 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001303 } else {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001304 // No conflict possible, so just do the moves.
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001305 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001306 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
1307 __ movl(destination.AsRegisterPairHigh<Register>(),
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001308 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
1309 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001310 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05001311 if (source.IsFpuRegister()) {
1312 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
1313 } else if (source.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001314 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001315 } else if (source.IsRegisterPair()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001316 size_t elem_size = DataType::Size(DataType::Type::kInt32);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001317 // Create stack space for 2 elements.
1318 __ subl(ESP, Immediate(2 * elem_size));
1319 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
1320 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
1321 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
1322 // And remove the temporary stack space we allocated.
1323 __ addl(ESP, Immediate(2 * elem_size));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001324 } else {
1325 LOG(FATAL) << "Unimplemented";
1326 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001327 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001328 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001329 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001330 // No conflict possible, so just do the moves.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001331 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001332 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001333 source.AsRegisterPairHigh<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001334 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001335 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001336 } else if (source.IsConstant()) {
1337 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001338 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1339 int64_t value = GetInt64ValueOf(constant);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001340 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(Low32Bits(value)));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001341 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
1342 Immediate(High32Bits(value)));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001343 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001344 DCHECK(source.IsDoubleStackSlot()) << source;
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001345 EmitParallelMoves(
1346 Location::StackSlot(source.GetStackIndex()),
1347 Location::StackSlot(destination.GetStackIndex()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001348 DataType::Type::kInt32,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001349 Location::StackSlot(source.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001350 Location::StackSlot(destination.GetHighStackIndex(kX86WordSize)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001351 DataType::Type::kInt32);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001352 }
1353 }
1354}
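// Two details of Move64 worth illustrating. Pair-to-pair copies go through
// EmitParallelMoves so that overlapping pairs (e.g. EAX:EDX -> EDX:EAX) are swapped
// without clobbering a half. An XMM-to-pair move is done in place and destroys the
// source register (register names are illustrative):
//   movd  eax, xmm0      // low 32 bits
//   psrlq xmm0, 32
//   movd  edx, xmm0      // high 32 bits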
1355
Calin Juravle175dc732015-08-25 15:42:32 +01001356void CodeGeneratorX86::MoveConstant(Location location, int32_t value) {
1357 DCHECK(location.IsRegister());
1358 __ movl(location.AsRegister<Register>(), Immediate(value));
1359}
1360
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001361void CodeGeneratorX86::MoveLocation(Location dst, Location src, DataType::Type dst_type) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001362 HParallelMove move(GetGraph()->GetAllocator());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001363 if (dst_type == DataType::Type::kInt64 && !src.IsConstant() && !src.IsFpuRegister()) {
1364 move.AddMove(src.ToLow(), dst.ToLow(), DataType::Type::kInt32, nullptr);
1365 move.AddMove(src.ToHigh(), dst.ToHigh(), DataType::Type::kInt32, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001366 } else {
David Brazdil74eb1b22015-12-14 11:44:01 +00001367 move.AddMove(src, dst, dst_type, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001368 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001369 GetMoveResolver()->EmitNativeCode(&move);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001370}
1371
1372void CodeGeneratorX86::AddLocationAsTemp(Location location, LocationSummary* locations) {
1373 if (location.IsRegister()) {
1374 locations->AddTemp(location);
1375 } else if (location.IsRegisterPair()) {
1376 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1377 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
1378 } else {
1379 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1380 }
1381}
1382
David Brazdilfc6a86a2015-06-26 10:33:45 +00001383void InstructionCodeGeneratorX86::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08001384 if (successor->IsExitBlock()) {
1385 DCHECK(got->GetPrevious()->AlwaysThrows());
1386 return; // no code needed
1387 }
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001388
1389 HBasicBlock* block = got->GetBlock();
1390 HInstruction* previous = got->GetPrevious();
1391
1392 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001393 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001394 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
1395 __ pushl(EAX);
1396 __ movl(EAX, Address(ESP, kX86WordSize));
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001397 NearLabel overflow;
1398 __ cmpw(Address(EAX, ArtMethod::HotnessCountOffset().Int32Value()),
1399 Immediate(ArtMethod::MaxCounter()));
1400 __ j(kEqual, &overflow);
1401 __ addw(Address(EAX, ArtMethod::HotnessCountOffset().Int32Value()),
1402 Immediate(1));
1403 __ Bind(&overflow);
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001404 __ popl(EAX);
1405 }
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001406 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1407 return;
1408 }
1409
1410 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1411 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1412 }
1413 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001414 __ jmp(codegen_->GetLabelOf(successor));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001415 }
1416}
1417
David Brazdilfc6a86a2015-06-26 10:33:45 +00001418void LocationsBuilderX86::VisitGoto(HGoto* got) {
1419 got->SetLocations(nullptr);
1420}
1421
1422void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
1423 HandleGoto(got, got->GetSuccessor());
1424}
1425
1426void LocationsBuilderX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1427 try_boundary->SetLocations(nullptr);
1428}
1429
1430void InstructionCodeGeneratorX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1431 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1432 if (!successor->IsExitBlock()) {
1433 HandleGoto(try_boundary, successor);
1434 }
1435}
1436
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001437void LocationsBuilderX86::VisitExit(HExit* exit) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001438 exit->SetLocations(nullptr);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001439}
1440
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001441void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001442}
1443
Mark Mendell152408f2015-12-31 12:28:50 -05001444template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001445void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001446 LabelType* true_label,
1447 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001448 if (cond->IsFPConditionTrueIfNaN()) {
1449 __ j(kUnordered, true_label);
1450 } else if (cond->IsFPConditionFalseIfNaN()) {
1451 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001452 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001453 __ j(X86UnsignedOrFPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001454}
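// NaN handling example for the helper above: a NaN operand makes ucomiss/ucomisd
// report "unordered". For (x > y) a NaN must yield false, so the unordered jump goes
// to the false label; for (x != y) a NaN must yield true, so it goes to the true
// label. The final jump uses the unsigned-style condition that FP compares produce
// (kAbove, kBelow, ...).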
1455
Mark Mendell152408f2015-12-31 12:28:50 -05001456template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001457void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001458 LabelType* true_label,
1459 LabelType* false_label) {
Mark Mendellc4701932015-04-10 13:18:51 -04001460 LocationSummary* locations = cond->GetLocations();
1461 Location left = locations->InAt(0);
1462 Location right = locations->InAt(1);
1463 IfCondition if_cond = cond->GetCondition();
1464
Mark Mendellc4701932015-04-10 13:18:51 -04001465 Register left_high = left.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001466 Register left_low = left.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001467 IfCondition true_high_cond = if_cond;
1468 IfCondition false_high_cond = cond->GetOppositeCondition();
Aart Bike9f37602015-10-09 11:15:55 -07001469 Condition final_condition = X86UnsignedOrFPCondition(if_cond); // unsigned on lower part
Mark Mendellc4701932015-04-10 13:18:51 -04001470
1471 // Set the conditions for the test, remembering that == needs to be
1472 // decided using the low words.
1473 switch (if_cond) {
1474 case kCondEQ:
Mark Mendellc4701932015-04-10 13:18:51 -04001475 case kCondNE:
Roland Levillain4fa13f62015-07-06 18:11:54 +01001476 // Nothing to do.
Mark Mendellc4701932015-04-10 13:18:51 -04001477 break;
1478 case kCondLT:
1479 false_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001480 break;
1481 case kCondLE:
1482 true_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001483 break;
1484 case kCondGT:
1485 false_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001486 break;
1487 case kCondGE:
1488 true_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001489 break;
Aart Bike9f37602015-10-09 11:15:55 -07001490 case kCondB:
1491 false_high_cond = kCondA;
1492 break;
1493 case kCondBE:
1494 true_high_cond = kCondB;
1495 break;
1496 case kCondA:
1497 false_high_cond = kCondB;
1498 break;
1499 case kCondAE:
1500 true_high_cond = kCondA;
1501 break;
Mark Mendellc4701932015-04-10 13:18:51 -04001502 }
1503
1504 if (right.IsConstant()) {
1505 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellc4701932015-04-10 13:18:51 -04001506 int32_t val_high = High32Bits(value);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001507 int32_t val_low = Low32Bits(value);
Mark Mendellc4701932015-04-10 13:18:51 -04001508
Aart Bika19616e2016-02-01 18:57:58 -08001509 codegen_->Compare32BitValue(left_high, val_high);
Mark Mendellc4701932015-04-10 13:18:51 -04001510 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001511 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001512 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001513 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001514 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001515 __ j(X86Condition(true_high_cond), true_label);
1516 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001517 }
1518    // The high words are equal, so compare the low words.
Aart Bika19616e2016-02-01 18:57:58 -08001519 codegen_->Compare32BitValue(left_low, val_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001520 } else if (right.IsRegisterPair()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001521 Register right_high = right.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001522 Register right_low = right.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001523
1524 __ cmpl(left_high, right_high);
1525 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001526 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001527 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001528 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001529 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001530 __ j(X86Condition(true_high_cond), true_label);
1531 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001532 }
1533    // The high words are equal, so compare the low words.
1534 __ cmpl(left_low, right_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001535 } else {
1536 DCHECK(right.IsDoubleStackSlot());
1537 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1538 if (if_cond == kCondNE) {
1539 __ j(X86Condition(true_high_cond), true_label);
1540 } else if (if_cond == kCondEQ) {
1541 __ j(X86Condition(false_high_cond), false_label);
1542 } else {
1543 __ j(X86Condition(true_high_cond), true_label);
1544 __ j(X86Condition(false_high_cond), false_label);
1545 }
1546    // The high words are equal, so compare the low words.
1547 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Mark Mendellc4701932015-04-10 13:18:51 -04001548 }
1549 // The last comparison might be unsigned.
1550 __ j(final_condition, true_label);
1551}
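// Worked example of the long compare above, for (a < b) with both operands in
// register pairs (register names are illustrative):
//   cmpl a_hi, b_hi
//   jl   true_label     // the high words decide with a signed compare...
//   jg   false_label
//   cmpl a_lo, b_lo
//   jb   true_label     // ...and the low words are compared unsigned
// Only == and != need a single high-word jump, since they depend on both words
// equally.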
1552
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001553void InstructionCodeGeneratorX86::GenerateFPCompare(Location lhs,
1554 Location rhs,
1555 HInstruction* insn,
1556 bool is_double) {
1557 HX86LoadFromConstantTable* const_area = insn->InputAt(1)->AsX86LoadFromConstantTable();
1558 if (is_double) {
1559 if (rhs.IsFpuRegister()) {
1560 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1561 } else if (const_area != nullptr) {
1562 DCHECK(const_area->IsEmittedAtUseSite());
1563 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(),
1564 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001565 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
1566 const_area->GetBaseMethodAddress(),
1567 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001568 } else {
1569 DCHECK(rhs.IsDoubleStackSlot());
1570 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1571 }
1572 } else {
1573 if (rhs.IsFpuRegister()) {
1574 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1575 } else if (const_area != nullptr) {
1576 DCHECK(const_area->IsEmittedAtUseSite());
1577 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(),
1578 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001579 const_area->GetConstant()->AsFloatConstant()->GetValue(),
1580 const_area->GetBaseMethodAddress(),
1581 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001582 } else {
1583 DCHECK(rhs.IsStackSlot());
1584 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1585 }
1586 }
1587}
1588
Mark Mendell152408f2015-12-31 12:28:50 -05001589template<class LabelType>
David Brazdil0debae72015-11-12 18:37:00 +00001590void InstructionCodeGeneratorX86::GenerateCompareTestAndBranch(HCondition* condition,
Mark Mendell152408f2015-12-31 12:28:50 -05001591 LabelType* true_target_in,
1592 LabelType* false_target_in) {
David Brazdil0debae72015-11-12 18:37:00 +00001593 // Generated branching requires both targets to be explicit. If either of the
1594  // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
Mark Mendell152408f2015-12-31 12:28:50 -05001595 LabelType fallthrough_target;
1596 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1597 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
David Brazdil0debae72015-11-12 18:37:00 +00001598
Mark Mendellc4701932015-04-10 13:18:51 -04001599 LocationSummary* locations = condition->GetLocations();
1600 Location left = locations->InAt(0);
1601 Location right = locations->InAt(1);
1602
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001603 DataType::Type type = condition->InputAt(0)->GetType();
Mark Mendellc4701932015-04-10 13:18:51 -04001604 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001605 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04001606 GenerateLongComparesAndJumps(condition, true_target, false_target);
1607 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001608 case DataType::Type::kFloat32:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001609 GenerateFPCompare(left, right, condition, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001610 GenerateFPJumps(condition, true_target, false_target);
1611 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001612 case DataType::Type::kFloat64:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001613 GenerateFPCompare(left, right, condition, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001614 GenerateFPJumps(condition, true_target, false_target);
1615 break;
1616 default:
1617 LOG(FATAL) << "Unexpected compare type " << type;
1618 }
1619
David Brazdil0debae72015-11-12 18:37:00 +00001620 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001621 __ jmp(false_target);
1622 }
David Brazdil0debae72015-11-12 18:37:00 +00001623
1624 if (fallthrough_target.IsLinked()) {
1625 __ Bind(&fallthrough_target);
1626 }
Mark Mendellc4701932015-04-10 13:18:51 -04001627}
1628
David Brazdil0debae72015-11-12 18:37:00 +00001629static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1630  // Moves may affect the eflags register (a move of zero uses xorl), so the eflags are only
1631  // reliably set when the condition is immediately before `branch`. We also can't use the
1632  // eflags for materialized long/FP conditions, since those are produced with complex branching.
1633 return cond->IsCondition() &&
1634 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001635 cond->InputAt(0)->GetType() != DataType::Type::kInt64 &&
1636 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001637}
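// Example of when the eflags can be reused: for a materialized HLessThan immediately
// followed by the HIf that consumes it, the branch can test the flags left by the
// compare directly. If any instruction was scheduled in between (even a simple move),
// the branch instead re-tests the materialized boolean against zero.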
1638
Mark Mendell152408f2015-12-31 12:28:50 -05001639template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001640void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001641 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001642 LabelType* true_target,
1643 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001644 HInstruction* cond = instruction->InputAt(condition_input_index);
1645
1646 if (true_target == nullptr && false_target == nullptr) {
1647 // Nothing to do. The code always falls through.
1648 return;
1649 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001650 // Constant condition, statically compared against "true" (integer value 1).
1651 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001652 if (true_target != nullptr) {
1653 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001654 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001655 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001656 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001657 if (false_target != nullptr) {
1658 __ jmp(false_target);
1659 }
1660 }
1661 return;
1662 }
1663
1664 // The following code generates these patterns:
1665 // (1) true_target == nullptr && false_target != nullptr
1666 // - opposite condition true => branch to false_target
1667 // (2) true_target != nullptr && false_target == nullptr
1668 // - condition true => branch to true_target
1669 // (3) true_target != nullptr && false_target != nullptr
1670 // - condition true => branch to true_target
1671 // - branch to false_target
1672 if (IsBooleanValueOrMaterializedCondition(cond)) {
1673 if (AreEflagsSetFrom(cond, instruction)) {
1674 if (true_target == nullptr) {
1675 __ j(X86Condition(cond->AsCondition()->GetOppositeCondition()), false_target);
1676 } else {
1677 __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
1678 }
1679 } else {
1680 // Materialized condition, compare against 0.
1681 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1682 if (lhs.IsRegister()) {
1683 __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>());
1684 } else {
1685 __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
1686 }
1687 if (true_target == nullptr) {
1688 __ j(kEqual, false_target);
1689 } else {
1690 __ j(kNotEqual, true_target);
1691 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001692 }
1693 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001694    // The condition has not been materialized; use its inputs as the comparison and
1695 // its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001696 HCondition* condition = cond->AsCondition();
David Brazdil0debae72015-11-12 18:37:00 +00001697
1698 // If this is a long or FP comparison that has been folded into
1699 // the HCondition, generate the comparison directly.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001700 DataType::Type type = condition->InputAt(0)->GetType();
1701 if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
David Brazdil0debae72015-11-12 18:37:00 +00001702 GenerateCompareTestAndBranch(condition, true_target, false_target);
1703 return;
1704 }
1705
1706 Location lhs = condition->GetLocations()->InAt(0);
1707 Location rhs = condition->GetLocations()->InAt(1);
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001708 // LHS is guaranteed to be in a register (see LocationsBuilderX86::HandleCondition).
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001709 codegen_->GenerateIntCompare(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00001710 if (true_target == nullptr) {
1711 __ j(X86Condition(condition->GetOppositeCondition()), false_target);
1712 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001713 __ j(X86Condition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001714 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001715 }
David Brazdil0debae72015-11-12 18:37:00 +00001716
1717 // If neither branch falls through (case 3), the conditional branch to `true_target`
1718 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1719 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001720 __ jmp(false_target);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001721 }
1722}
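// Sketch of the fall-through handling above: for "if (cond) { A } else { B }" where B
// is the next block in the code layout, only pattern (2) applies and a single
// conditional jump to A is emitted; when neither successor is the next block
// (pattern (3)), the conditional jump to the true target is followed by an
// unconditional jmp to the false target.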
1723
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001724void LocationsBuilderX86::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001725 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00001726 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001727 locations->SetInAt(0, Location::Any());
1728 }
1729}
1730
1731void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001732 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1733 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1734 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1735 nullptr : codegen_->GetLabelOf(true_successor);
1736 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1737 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08001738 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001739}
1740
1741void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001742 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001743 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01001744 InvokeRuntimeCallingConvention calling_convention;
1745 RegisterSet caller_saves = RegisterSet::Empty();
1746 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1747 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00001748 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001749 locations->SetInAt(0, Location::Any());
1750 }
1751}
1752
1753void InstructionCodeGeneratorX86::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001754 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001755 GenerateTestAndBranch<Label>(deoptimize,
Andreas Gampe3db70682018-12-26 15:12:03 -08001756 /* condition_input_index= */ 0,
David Brazdil74eb1b22015-12-14 11:44:01 +00001757 slow_path->GetEntryLabel(),
Andreas Gampe3db70682018-12-26 15:12:03 -08001758 /* false_target= */ nullptr);
David Brazdil74eb1b22015-12-14 11:44:01 +00001759}
1760
Mingyao Yang063fc772016-08-02 11:02:54 -07001761void LocationsBuilderX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001762 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07001763 LocationSummary(flag, LocationSummary::kNoCall);
1764 locations->SetOut(Location::RequiresRegister());
1765}
1766
1767void InstructionCodeGeneratorX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1768 __ movl(flag->GetLocations()->Out().AsRegister<Register>(),
1769 Address(ESP, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
1770}
1771
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001772static bool SelectCanUseCMOV(HSelect* select) {
1773 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001774 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001775 return false;
1776 }
1777
1778  // An FP condition doesn't generate the single condition code (CC) that we need.
1779  // In 32-bit mode, a long condition doesn't generate a single CC either.
1780 HInstruction* condition = select->GetCondition();
1781 if (condition->IsCondition()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001782 DataType::Type compare_type = condition->InputAt(0)->GetType();
1783 if (compare_type == DataType::Type::kInt64 ||
1784 DataType::IsFloatingPointType(compare_type)) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001785 return false;
1786 }
1787 }
1788
1789 // We can generate a CMOV for this Select.
1790 return true;
1791}
1792
David Brazdil74eb1b22015-12-14 11:44:01 +00001793void LocationsBuilderX86::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001794 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001795 if (DataType::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001796 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001797 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001798 } else {
1799 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001800 if (SelectCanUseCMOV(select)) {
1801 if (select->InputAt(1)->IsConstant()) {
1802 // Cmov can't handle a constant value.
1803 locations->SetInAt(1, Location::RequiresRegister());
1804 } else {
1805 locations->SetInAt(1, Location::Any());
1806 }
1807 } else {
1808 locations->SetInAt(1, Location::Any());
1809 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001810 }
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001811 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1812 locations->SetInAt(2, Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00001813 }
1814 locations->SetOut(Location::SameAsFirstInput());
1815}
1816
1817void InstructionCodeGeneratorX86::VisitSelect(HSelect* select) {
1818 LocationSummary* locations = select->GetLocations();
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001819 DCHECK(locations->InAt(0).Equals(locations->Out()));
1820 if (SelectCanUseCMOV(select)) {
1821 // If both the condition and the source types are integer, we can generate
1822 // a CMOV to implement Select.
1823
1824 HInstruction* select_condition = select->GetCondition();
1825 Condition cond = kNotEqual;
1826
1827 // Figure out how to test the 'condition'.
1828 if (select_condition->IsCondition()) {
1829 HCondition* condition = select_condition->AsCondition();
1830 if (!condition->IsEmittedAtUseSite()) {
1831 // This was a previously materialized condition.
1832 // Can we use the existing condition code?
1833 if (AreEflagsSetFrom(condition, select)) {
1834 // Materialization was the previous instruction. Condition codes are right.
1835 cond = X86Condition(condition->GetCondition());
1836 } else {
1837 // No, we have to recreate the condition code.
1838 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1839 __ testl(cond_reg, cond_reg);
1840 }
1841 } else {
1842 // We can't handle FP or long here.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001843 DCHECK_NE(condition->InputAt(0)->GetType(), DataType::Type::kInt64);
1844 DCHECK(!DataType::IsFloatingPointType(condition->InputAt(0)->GetType()));
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001845 LocationSummary* cond_locations = condition->GetLocations();
Roland Levillain0b671c02016-08-19 12:02:34 +01001846 codegen_->GenerateIntCompare(cond_locations->InAt(0), cond_locations->InAt(1));
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001847 cond = X86Condition(condition->GetCondition());
1848 }
1849 } else {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001850 // Must be a Boolean condition, which needs to be compared to 0.
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001851 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1852 __ testl(cond_reg, cond_reg);
1853 }
1854
1855 // If the condition is true, overwrite the output, which already contains false.
1856 Location false_loc = locations->InAt(0);
1857 Location true_loc = locations->InAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001858 if (select->GetType() == DataType::Type::kInt64) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001859 // 64 bit conditional move.
1860 Register false_high = false_loc.AsRegisterPairHigh<Register>();
1861 Register false_low = false_loc.AsRegisterPairLow<Register>();
1862 if (true_loc.IsRegisterPair()) {
1863 __ cmovl(cond, false_high, true_loc.AsRegisterPairHigh<Register>());
1864 __ cmovl(cond, false_low, true_loc.AsRegisterPairLow<Register>());
1865 } else {
1866 __ cmovl(cond, false_high, Address(ESP, true_loc.GetHighStackIndex(kX86WordSize)));
1867 __ cmovl(cond, false_low, Address(ESP, true_loc.GetStackIndex()));
1868 }
1869 } else {
1870 // 32 bit conditional move.
1871 Register false_reg = false_loc.AsRegister<Register>();
1872 if (true_loc.IsRegister()) {
1873 __ cmovl(cond, false_reg, true_loc.AsRegister<Register>());
1874 } else {
1875 __ cmovl(cond, false_reg, Address(ESP, true_loc.GetStackIndex()));
1876 }
1877 }
1878 } else {
1879 NearLabel false_target;
1880 GenerateTestAndBranch<NearLabel>(
Andreas Gampe3db70682018-12-26 15:12:03 -08001881 select, /* condition_input_index= */ 2, /* true_target= */ nullptr, &false_target);
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001882 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1883 __ Bind(&false_target);
1884 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001885}
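// Example of the CMOV path above for an int "x = cond ? t : f" with a materialized
// boolean condition (register names are illustrative). The output register already
// holds the false value, so the sequence is roughly:
//   testl  ecx, ecx      // cond
//   cmovne eax, edx      // overwrite with the true value only when cond != 0
// The 64-bit case issues two cmovl instructions, one per register half.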
1886
David Srbecky0cf44932015-12-09 14:09:59 +00001887void LocationsBuilderX86::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001888 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00001889}
1890
David Srbeckyd28f4a02016-03-14 17:14:24 +00001891void InstructionCodeGeneratorX86::VisitNativeDebugInfo(HNativeDebugInfo*) {
1892 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001893}
1894
1895void CodeGeneratorX86::GenerateNop() {
1896 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001897}
1898
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001899void LocationsBuilderX86::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001900 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001901 new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001902 // Handle the long/FP comparisons made in instruction simplification.
1903 switch (cond->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001904 case DataType::Type::kInt64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001905 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell8659e842016-02-16 10:41:46 -05001906 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001907 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001908 locations->SetOut(Location::RequiresRegister());
1909 }
1910 break;
1911 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001912 case DataType::Type::kFloat32:
1913 case DataType::Type::kFloat64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001914 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001915 if (cond->InputAt(1)->IsX86LoadFromConstantTable()) {
1916 DCHECK(cond->InputAt(1)->IsEmittedAtUseSite());
1917 } else if (cond->InputAt(1)->IsConstant()) {
1918 locations->SetInAt(1, Location::RequiresFpuRegister());
1919 } else {
1920 locations->SetInAt(1, Location::Any());
1921 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001922 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001923 locations->SetOut(Location::RequiresRegister());
1924 }
1925 break;
1926 }
1927 default:
1928 locations->SetInAt(0, Location::RequiresRegister());
1929 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001930 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001931 // We need a byte register.
1932 locations->SetOut(Location::RegisterLocation(ECX));
1933 }
1934 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001935 }
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00001936}
1937
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001938void InstructionCodeGeneratorX86::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001939 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001940 return;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001941 }
Mark Mendellc4701932015-04-10 13:18:51 -04001942
1943 LocationSummary* locations = cond->GetLocations();
1944 Location lhs = locations->InAt(0);
1945 Location rhs = locations->InAt(1);
1946 Register reg = locations->Out().AsRegister<Register>();
Mark Mendell152408f2015-12-31 12:28:50 -05001947 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001948
1949 switch (cond->InputAt(0)->GetType()) {
1950 default: {
1951 // Integer case.
1952
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01001953 // Clear output register: setb only sets the low byte.
Mark Mendellc4701932015-04-10 13:18:51 -04001954 __ xorl(reg, reg);
Roland Levillain0b671c02016-08-19 12:02:34 +01001955 codegen_->GenerateIntCompare(lhs, rhs);
Aart Bike9f37602015-10-09 11:15:55 -07001956 __ setb(X86Condition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001957 return;
1958 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001959 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04001960 GenerateLongComparesAndJumps(cond, &true_label, &false_label);
1961 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001962 case DataType::Type::kFloat32:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001963 GenerateFPCompare(lhs, rhs, cond, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001964 GenerateFPJumps(cond, &true_label, &false_label);
1965 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001966 case DataType::Type::kFloat64:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001967 GenerateFPCompare(lhs, rhs, cond, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001968 GenerateFPJumps(cond, &true_label, &false_label);
1969 break;
1970 }
1971
1972 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001973 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001974
Roland Levillain4fa13f62015-07-06 18:11:54 +01001975 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001976 __ Bind(&false_label);
1977 __ xorl(reg, reg);
1978 __ jmp(&done_label);
1979
Roland Levillain4fa13f62015-07-06 18:11:54 +01001980 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001981 __ Bind(&true_label);
1982 __ movl(reg, Immediate(1));
1983 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001984}
1985
1986void LocationsBuilderX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001987 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001988}
1989
1990void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001991 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001992}
1993
1994void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001995 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001996}
1997
1998void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001999 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002000}
2001
2002void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002003 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002004}
2005
2006void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002007 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002008}
2009
2010void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002011 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002012}
2013
2014void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002015 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002016}
2017
2018void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002019 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002020}
2021
2022void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002023 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002024}
2025
2026void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002027 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002028}
2029
2030void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002031 HandleCondition(comp);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002032}
2033
Aart Bike9f37602015-10-09 11:15:55 -07002034void LocationsBuilderX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002035 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002036}
2037
2038void InstructionCodeGeneratorX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002039 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002040}
2041
2042void LocationsBuilderX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002043 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002044}
2045
2046void InstructionCodeGeneratorX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002047 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002048}
2049
2050void LocationsBuilderX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002051 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002052}
2053
2054void InstructionCodeGeneratorX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002055 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002056}
2057
2058void LocationsBuilderX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002059 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002060}
2061
2062void InstructionCodeGeneratorX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002063 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002064}
2065
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002066void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002067 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002068 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002069 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002070}
2071
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002072void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002073 // Will be generated at use site.
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002074}
2075
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002076void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
2077 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002078 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002079 locations->SetOut(Location::ConstantLocation(constant));
2080}
2081
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002082void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002083 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002084}
2085
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002086void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002087 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002088 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002089 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002090}
2091
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002092void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002093 // Will be generated at use site.
2094}
2095
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002096void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
2097 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002098 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002099 locations->SetOut(Location::ConstantLocation(constant));
2100}
2101
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002102void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002103 // Will be generated at use site.
2104}
2105
2106void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
2107 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002108 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002109 locations->SetOut(Location::ConstantLocation(constant));
2110}
2111
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002112void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002113 // Will be generated at use site.
2114}
2115
Igor Murashkind01745e2017-04-05 16:40:31 -07002116void LocationsBuilderX86::VisitConstructorFence(HConstructorFence* constructor_fence) {
2117 constructor_fence->SetLocations(nullptr);
2118}
2119
2120void InstructionCodeGeneratorX86::VisitConstructorFence(
2121 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
2122 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
2123}
2124
Calin Juravle27df7582015-04-17 19:12:31 +01002125void LocationsBuilderX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2126 memory_barrier->SetLocations(nullptr);
2127}
2128
2129void InstructionCodeGeneratorX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002130 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002131}
2132
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002133void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002134 ret->SetLocations(nullptr);
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002135}
2136
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002137void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002138 codegen_->GenerateFrameExit();
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002139}
2140
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002141void LocationsBuilderX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002142 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002143 new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002144 switch (ret->InputAt(0)->GetType()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002145 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002146 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002147 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002148 case DataType::Type::kInt8:
2149 case DataType::Type::kUint16:
2150 case DataType::Type::kInt16:
2151 case DataType::Type::kInt32:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002152 locations->SetInAt(0, Location::RegisterLocation(EAX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002153 break;
2154
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002155 case DataType::Type::kInt64:
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002156 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002157 0, Location::RegisterPairLocation(EAX, EDX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002158 break;
2159
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002160 case DataType::Type::kFloat32:
2161 case DataType::Type::kFloat64:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002162 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002163 0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002164 break;
2165
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002166 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002167 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002168 }
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002169}
2170
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002171void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002172 if (kIsDebugBuild) {
2173 switch (ret->InputAt(0)->GetType()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002174 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002175 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002176 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002177 case DataType::Type::kInt8:
2178 case DataType::Type::kUint16:
2179 case DataType::Type::kInt16:
2180 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002181 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<Register>(), EAX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002182 break;
2183
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002184 case DataType::Type::kInt64:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002185 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
2186 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002187 break;
2188
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002189 case DataType::Type::kFloat32:
2190 case DataType::Type::kFloat64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002191 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>(), XMM0);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002192 break;
2193
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002194 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002195 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002196 }
2197 }
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002198 codegen_->GenerateFrameExit();
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002199}
2200
Calin Juravle175dc732015-08-25 15:42:32 +01002201void LocationsBuilderX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2202  // The trampoline uses the same calling convention as the dex calling convention,
2203  // except that instead of being loaded with the target Method*, arg0/r0 contains
2204  // the method_idx.
2205 HandleInvoke(invoke);
2206}
2207
2208void InstructionCodeGeneratorX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2209 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2210}
2211
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002212void LocationsBuilderX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002213 // Explicit clinit checks triggered by static invokes must have been pruned by
2214 // art::PrepareForRegisterAllocation.
2215 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002216
Mark Mendellfb8d2792015-03-31 22:16:59 -04002217 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002218 if (intrinsic.TryDispatch(invoke)) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01002219 if (invoke->GetLocations()->CanCall() &&
2220 invoke->HasPcRelativeMethodLoadKind() &&
2221 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).IsInvalid()) {
Vladimir Markoc53c0792015-11-19 15:48:33 +00002222 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002223 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04002224 return;
2225 }
2226
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002227 HandleInvoke(invoke);
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002228
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002229  // For PC-relative method load kinds the invoke has an extra input, the PC-relative address base.
Vladimir Marko65979462017-05-19 17:25:12 +01002230 if (invoke->HasPcRelativeMethodLoadKind()) {
Vladimir Markob4536b72015-11-24 13:45:23 +00002231 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002232 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002233}
2234
Mark Mendell09ed1a32015-03-25 08:30:06 -04002235static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86* codegen) {
2236 if (invoke->GetLocations()->Intrinsified()) {
2237 IntrinsicCodeGeneratorX86 intrinsic(codegen);
2238 intrinsic.Dispatch(invoke);
2239 return true;
2240 }
2241 return false;
2242}
2243
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002244void InstructionCodeGeneratorX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002245 // Explicit clinit checks triggered by static invokes must have been pruned by
2246 // art::PrepareForRegisterAllocation.
2247 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002248
Mark Mendell09ed1a32015-03-25 08:30:06 -04002249 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2250 return;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002251 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002252
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002253 LocationSummary* locations = invoke->GetLocations();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002254 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002255 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002256}
2257
2258void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00002259 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
2260 if (intrinsic.TryDispatch(invoke)) {
2261 return;
2262 }
2263
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002264 HandleInvoke(invoke);
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00002265 if (codegen_->GetCompilerOptions().IsBaseline() && !Runtime::Current()->IsAotCompiler()) {
2266 // Add one temporary for inline cache update.
2267 invoke->GetLocations()->AddTemp(Location::RegisterLocation(EBP));
2268 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002269}
2270
2271void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002272 InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002273 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002274}
2275
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002276void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002277 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2278 return;
2279 }
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002280
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002281 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002282 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002283}
2284
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002285void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002286 // This call to HandleInvoke allocates a temporary (core) register
2287  // which the code generator also uses to move the hidden argument
2288  // into its FP register (XMM7).
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002289 HandleInvoke(invoke);
2290 // Add the hidden argument.
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002291 invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM7));
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00002292
2293 if (codegen_->GetCompilerOptions().IsBaseline() && !Runtime::Current()->IsAotCompiler()) {
2294 // Add one temporary for inline cache update.
2295 invoke->GetLocations()->AddTemp(Location::RegisterLocation(EBP));
2296 }
2297}
2298
2299void CodeGeneratorX86::MaybeGenerateInlineCacheCheck(HInstruction* instruction, Register klass) {
2300 DCHECK_EQ(EAX, klass);
Nicolas Geoffray17a39ba2019-11-27 20:57:48 +00002301 // We know the destination of an intrinsic, so no need to record inline
2302 // caches (also the intrinsic location builder doesn't request an additional
2303 // temporary).
2304 if (!instruction->GetLocations()->Intrinsified() &&
2305 GetCompilerOptions().IsBaseline() &&
2306 !Runtime::Current()->IsAotCompiler()) {
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00002307 DCHECK(!instruction->GetEnvironment()->IsFromInlinedInvoke());
2308 ScopedObjectAccess soa(Thread::Current());
2309 ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
2310 InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
2311 uint32_t address = reinterpret_cast32<uint32_t>(cache);
2312 if (kIsDebugBuild) {
2313 uint32_t temp_index = instruction->GetLocations()->GetTempCount() - 1u;
2314 CHECK_EQ(EBP, instruction->GetLocations()->GetTemp(temp_index).AsRegister<Register>());
2315 }
2316 Register temp = EBP;
2317 NearLabel done;
2318 __ movl(temp, Immediate(address));
2319 // Fast path for a monomorphic cache.
2320 __ cmpl(klass, Address(temp, InlineCache::ClassesOffset().Int32Value()));
2321 __ j(kEqual, &done);
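    // Slow path: the receiver's class does not match the cached class (or the
    // cache is not yet monomorphic), so call the runtime entrypoint that
    // records the seen class in the inline cache.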
2322 GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(kQuickUpdateInlineCache).Int32Value());
2323 __ Bind(&done);
2324 }
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002325}
2326
2327void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
2328 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002329 LocationSummary* locations = invoke->GetLocations();
2330 Register temp = locations->GetTemp(0).AsRegister<Register>();
2331 XmmRegister hidden_reg = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002332 Location receiver = locations->InAt(0);
2333 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2334
Roland Levillain0d5a2812015-11-13 10:07:31 +00002335  // Set the hidden argument. It is safe to do this here, as XMM7
2336 // won't be modified thereafter, before the `call` instruction.
2337 DCHECK_EQ(XMM7, hidden_reg);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002338 __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002339 __ movd(hidden_reg, temp);
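  // There is no instruction to load an immediate directly into an XMM
  // register, hence the detour through the core temp. The IMT conflict
  // trampoline is expected to read this hidden argument (the dex method
  // index) from XMM7 to resolve the actual interface method.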
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002340
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002341 if (receiver.IsStackSlot()) {
2342 __ movl(temp, Address(ESP, receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002343 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002344 __ movl(temp, Address(temp, class_offset));
2345 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002346 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002347 __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002348 }
Roland Levillain4d027112015-07-01 15:41:14 +01002349 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002350 // Instead of simply (possibly) unpoisoning `temp` here, we should
2351 // emit a read barrier for the previous class reference load.
2352  // However, this is not required in practice, as this is an
2353  // intermediate/temporary reference and because the current
2354  // concurrent copying collector keeps the from-space memory
2355  // intact/accessible until the end of the marking phase (the
2356  // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002357 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00002358
2359 codegen_->MaybeGenerateInlineCacheCheck(invoke, temp);
2360
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002361 // temp = temp->GetAddressOfIMT()
2362 __ movl(temp,
2363 Address(temp, mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002364 // temp = temp->GetImtEntryAt(method_offset);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002365 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00002366 invoke->GetImtIndex(), kX86PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002367 __ movl(temp, Address(temp, method_offset));
2368 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002369 __ call(Address(temp,
Andreas Gampe542451c2016-07-26 09:02:02 -07002370 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002371
2372 DCHECK(!codegen_->IsLeafMethod());
2373 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2374}
2375
Orion Hodsonac141392017-01-13 11:53:47 +00002376void LocationsBuilderX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2377 HandleInvoke(invoke);
2378}
2379
2380void InstructionCodeGeneratorX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2381 codegen_->GenerateInvokePolymorphicCall(invoke);
2382}
2383
Orion Hodson4c8e12e2018-05-18 08:33:20 +01002384void LocationsBuilderX86::VisitInvokeCustom(HInvokeCustom* invoke) {
2385 HandleInvoke(invoke);
2386}
2387
2388void InstructionCodeGeneratorX86::VisitInvokeCustom(HInvokeCustom* invoke) {
2389 codegen_->GenerateInvokeCustomCall(invoke);
2390}
2391
Roland Levillain88cb1752014-10-20 16:36:47 +01002392void LocationsBuilderX86::VisitNeg(HNeg* neg) {
2393 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002394 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Roland Levillain88cb1752014-10-20 16:36:47 +01002395 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002396 case DataType::Type::kInt32:
2397 case DataType::Type::kInt64:
Roland Levillain88cb1752014-10-20 16:36:47 +01002398 locations->SetInAt(0, Location::RequiresRegister());
2399 locations->SetOut(Location::SameAsFirstInput());
2400 break;
2401
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002402 case DataType::Type::kFloat32:
Roland Levillain5368c212014-11-27 15:03:41 +00002403 locations->SetInAt(0, Location::RequiresFpuRegister());
2404 locations->SetOut(Location::SameAsFirstInput());
2405 locations->AddTemp(Location::RequiresRegister());
2406 locations->AddTemp(Location::RequiresFpuRegister());
2407 break;
2408
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002409 case DataType::Type::kFloat64:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002410 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002411 locations->SetOut(Location::SameAsFirstInput());
2412 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002413 break;
2414
2415 default:
2416 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2417 }
2418}
2419
2420void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
2421 LocationSummary* locations = neg->GetLocations();
2422 Location out = locations->Out();
2423 Location in = locations->InAt(0);
2424 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002425 case DataType::Type::kInt32:
Roland Levillain88cb1752014-10-20 16:36:47 +01002426 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002427 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002428 __ negl(out.AsRegister<Register>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002429 break;
2430
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002431 case DataType::Type::kInt64:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002432 DCHECK(in.IsRegisterPair());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002433 DCHECK(in.Equals(out));
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002434 __ negl(out.AsRegisterPairLow<Register>());
2435      // Negation is similar to subtraction from zero. Negating the low
2436      // 32 bits (the first NEGL) sets the carry flag (CF) when they are
2437      // different from zero; to take that borrow into account, add the
2438      // carry into the high 32 bits (ADCL) before negating them with the
2439      // second NEGL.
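      // For example, negating the 64-bit value 1: the first NEGL yields
      // 0xFFFFFFFF with CF = 1, ADCL turns the high half into 1, and the
      // second NEGL turns it into 0xFFFFFFFF, giving -1 overall.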
2440 __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
2441 __ negl(out.AsRegisterPairHigh<Register>());
2442 break;
2443
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002444 case DataType::Type::kFloat32: {
Roland Levillain5368c212014-11-27 15:03:41 +00002445 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002446 Register constant = locations->GetTemp(0).AsRegister<Register>();
2447 XmmRegister mask = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002448 // Implement float negation with an exclusive or with value
2449 // 0x80000000 (mask for bit 31, representing the sign of a
2450 // single-precision floating-point number).
2451 __ movl(constant, Immediate(INT32_C(0x80000000)));
2452 __ movd(mask, constant);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002453 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002454 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002455 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002456
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002457 case DataType::Type::kFloat64: {
Roland Levillain5368c212014-11-27 15:03:41 +00002458 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002459 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002460 // Implement double negation with an exclusive or with value
2461 // 0x8000000000000000 (mask for bit 63, representing the sign of
2462 // a double-precision floating-point number).
2463 __ LoadLongConstant(mask, INT64_C(0x8000000000000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002464 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002465 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002466 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002467
2468 default:
2469 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2470 }
2471}
2472
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002473void LocationsBuilderX86::VisitX86FPNeg(HX86FPNeg* neg) {
2474 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002475 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002476 DCHECK(DataType::IsFloatingPointType(neg->GetType()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002477 locations->SetInAt(0, Location::RequiresFpuRegister());
2478 locations->SetInAt(1, Location::RequiresRegister());
2479 locations->SetOut(Location::SameAsFirstInput());
2480 locations->AddTemp(Location::RequiresFpuRegister());
2481}
2482
2483void InstructionCodeGeneratorX86::VisitX86FPNeg(HX86FPNeg* neg) {
2484 LocationSummary* locations = neg->GetLocations();
2485 Location out = locations->Out();
2486 DCHECK(locations->InAt(0).Equals(out));
2487
2488 Register constant_area = locations->InAt(1).AsRegister<Register>();
2489 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
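  // The sign-bit mask is loaded from the method's constant area, addressed
  // relative to the base method address held in `constant_area`.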
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002490 if (neg->GetType() == DataType::Type::kFloat32) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00002491 __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x80000000),
2492 neg->GetBaseMethodAddress(),
2493 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002494 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
2495 } else {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00002496 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000),
2497 neg->GetBaseMethodAddress(),
2498 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002499 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
2500 }
2501}
2502
Roland Levillaindff1f282014-11-05 14:15:05 +00002503void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002504 DataType::Type result_type = conversion->GetResultType();
2505 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002506 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2507 << input_type << " -> " << result_type;
Roland Levillain624279f2014-12-04 11:54:28 +00002508
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002509 // The float-to-long and double-to-long type conversions rely on a
2510 // call to the runtime.
Roland Levillain624279f2014-12-04 11:54:28 +00002511 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002512 ((input_type == DataType::Type::kFloat32 || input_type == DataType::Type::kFloat64)
2513 && result_type == DataType::Type::kInt64)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01002514 ? LocationSummary::kCallOnMainOnly
Roland Levillain624279f2014-12-04 11:54:28 +00002515 : LocationSummary::kNoCall;
2516 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002517 new (GetGraph()->GetAllocator()) LocationSummary(conversion, call_kind);
Roland Levillain624279f2014-12-04 11:54:28 +00002518
Roland Levillaindff1f282014-11-05 14:15:05 +00002519 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002520 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002521 case DataType::Type::kInt8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002522 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002523 case DataType::Type::kUint8:
2524 case DataType::Type::kInt8:
2525 case DataType::Type::kUint16:
2526 case DataType::Type::kInt16:
2527 case DataType::Type::kInt32:
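          // movzxb/movsxb can only encode EAX, EBX, ECX or EDX as a register
          // source, hence the byte register constraint on the input.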
2528 locations->SetInAt(0, Location::ByteRegisterOrConstant(ECX, conversion->InputAt(0)));
2529 // Make the output overlap to please the register allocator. This greatly simplifies
2530          // the validation of the linear scan implementation.
2531 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2532 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002533 case DataType::Type::kInt64: {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002534 HInstruction* input = conversion->InputAt(0);
2535 Location input_location = input->IsConstant()
2536 ? Location::ConstantLocation(input->AsConstant())
2537 : Location::RegisterPairLocation(EAX, EDX);
2538 locations->SetInAt(0, input_location);
2539 // Make the output overlap to please the register allocator. This greatly simplifies
2540          // the validation of the linear scan implementation.
2541 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2542 break;
2543 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00002544
2545 default:
2546 LOG(FATAL) << "Unexpected type conversion from " << input_type
2547 << " to " << result_type;
2548 }
2549 break;
2550
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002551 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002552 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002553 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
2554 locations->SetInAt(0, Location::Any());
2555 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Roland Levillain01a8d712014-11-14 16:27:39 +00002556 break;
2557
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002558 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002559 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002560 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002561 locations->SetInAt(0, Location::Any());
2562 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2563 break;
2564
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002565 case DataType::Type::kFloat32:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002566 locations->SetInAt(0, Location::RequiresFpuRegister());
2567 locations->SetOut(Location::RequiresRegister());
2568 locations->AddTemp(Location::RequiresFpuRegister());
2569 break;
2570
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002571 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002572 locations->SetInAt(0, Location::RequiresFpuRegister());
2573 locations->SetOut(Location::RequiresRegister());
2574 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002575 break;
2576
2577 default:
2578 LOG(FATAL) << "Unexpected type conversion from " << input_type
2579 << " to " << result_type;
2580 }
2581 break;
2582
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002583 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002584 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002585 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002586 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002587 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002588 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002589 case DataType::Type::kInt16:
2590 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002591 locations->SetInAt(0, Location::RegisterLocation(EAX));
2592 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
2593 break;
2594
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002595 case DataType::Type::kFloat32:
2596 case DataType::Type::kFloat64: {
Vladimir Marko949c91f2015-01-27 10:48:44 +00002597 InvokeRuntimeCallingConvention calling_convention;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002598 XmmRegister parameter = calling_convention.GetFpuRegisterAt(0);
2599 locations->SetInAt(0, Location::FpuRegisterLocation(parameter));
2600
Vladimir Marko949c91f2015-01-27 10:48:44 +00002601 // The runtime helper puts the result in EAX, EDX.
2602 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Vladimir Marko949c91f2015-01-27 10:48:44 +00002603 }
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002604 break;
Roland Levillaindff1f282014-11-05 14:15:05 +00002605
2606 default:
2607 LOG(FATAL) << "Unexpected type conversion from " << input_type
2608 << " to " << result_type;
2609 }
2610 break;
2611
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002612 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002613 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002614 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002615 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002616 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002617 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002618 case DataType::Type::kInt16:
2619 case DataType::Type::kInt32:
Roland Levillaincff13742014-11-17 14:32:17 +00002620 locations->SetInAt(0, Location::RequiresRegister());
2621 locations->SetOut(Location::RequiresFpuRegister());
2622 break;
2623
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002624 case DataType::Type::kInt64:
Roland Levillain232ade02015-04-20 15:14:36 +01002625 locations->SetInAt(0, Location::Any());
2626 locations->SetOut(Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002627 break;
2628
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002629 case DataType::Type::kFloat64:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002630 locations->SetInAt(0, Location::RequiresFpuRegister());
2631 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002632 break;
2633
2634 default:
2635 LOG(FATAL) << "Unexpected type conversion from " << input_type
2636 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002637 }
Roland Levillaincff13742014-11-17 14:32:17 +00002638 break;
2639
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002640 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002641 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002642 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002643 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002644 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002645 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002646 case DataType::Type::kInt16:
2647 case DataType::Type::kInt32:
Roland Levillaincff13742014-11-17 14:32:17 +00002648 locations->SetInAt(0, Location::RequiresRegister());
2649 locations->SetOut(Location::RequiresFpuRegister());
2650 break;
2651
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002652 case DataType::Type::kInt64:
Roland Levillain232ade02015-04-20 15:14:36 +01002653 locations->SetInAt(0, Location::Any());
2654 locations->SetOut(Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002655 break;
2656
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002657 case DataType::Type::kFloat32:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002658 locations->SetInAt(0, Location::RequiresFpuRegister());
2659 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002660 break;
2661
2662 default:
2663 LOG(FATAL) << "Unexpected type conversion from " << input_type
2664 << " to " << result_type;
2665 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002666 break;
2667
2668 default:
2669 LOG(FATAL) << "Unexpected type conversion from " << input_type
2670 << " to " << result_type;
2671 }
2672}
2673
2674void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
2675 LocationSummary* locations = conversion->GetLocations();
2676 Location out = locations->Out();
2677 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002678 DataType::Type result_type = conversion->GetResultType();
2679 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002680 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2681 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002682 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002683 case DataType::Type::kUint8:
2684 switch (input_type) {
2685 case DataType::Type::kInt8:
2686 case DataType::Type::kUint16:
2687 case DataType::Type::kInt16:
2688 case DataType::Type::kInt32:
2689 if (in.IsRegister()) {
2690 __ movzxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
2691 } else {
2692 DCHECK(in.GetConstant()->IsIntConstant());
2693 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
2694 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint8_t>(value)));
2695 }
2696 break;
2697 case DataType::Type::kInt64:
2698 if (in.IsRegisterPair()) {
2699 __ movzxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2700 } else {
2701 DCHECK(in.GetConstant()->IsLongConstant());
2702 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2703 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint8_t>(value)));
2704 }
2705 break;
2706
2707 default:
2708 LOG(FATAL) << "Unexpected type conversion from " << input_type
2709 << " to " << result_type;
2710 }
2711 break;
2712
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002713 case DataType::Type::kInt8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002714 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002715 case DataType::Type::kUint8:
2716 case DataType::Type::kUint16:
2717 case DataType::Type::kInt16:
2718 case DataType::Type::kInt32:
2719 if (in.IsRegister()) {
2720 __ movsxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
2721 } else {
2722 DCHECK(in.GetConstant()->IsIntConstant());
2723 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
2724 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2725 }
2726 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002727 case DataType::Type::kInt64:
Vladimir Markob52bbde2016-02-12 12:06:05 +00002728 if (in.IsRegisterPair()) {
2729 __ movsxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2730 } else {
2731 DCHECK(in.GetConstant()->IsLongConstant());
2732 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2733 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2734 }
2735 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002736
2737 default:
2738 LOG(FATAL) << "Unexpected type conversion from " << input_type
2739 << " to " << result_type;
2740 }
2741 break;
2742
2743 case DataType::Type::kUint16:
2744 switch (input_type) {
2745 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002746 case DataType::Type::kInt16:
2747 case DataType::Type::kInt32:
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002748 if (in.IsRegister()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002749 __ movzxw(out.AsRegister<Register>(), in.AsRegister<Register>());
2750 } else if (in.IsStackSlot()) {
2751 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002752 } else {
2753 DCHECK(in.GetConstant()->IsIntConstant());
2754 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002755 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
2756 }
2757 break;
2758 case DataType::Type::kInt64:
2759 if (in.IsRegisterPair()) {
2760 __ movzxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2761 } else if (in.IsDoubleStackSlot()) {
2762 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2763 } else {
2764 DCHECK(in.GetConstant()->IsLongConstant());
2765 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2766 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002767 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00002768 break;
2769
2770 default:
2771 LOG(FATAL) << "Unexpected type conversion from " << input_type
2772 << " to " << result_type;
2773 }
2774 break;
2775
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002776 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002777 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002778 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002779 case DataType::Type::kInt32:
Roland Levillain01a8d712014-11-14 16:27:39 +00002780 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002781 __ movsxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain01a8d712014-11-14 16:27:39 +00002782 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002783 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain01a8d712014-11-14 16:27:39 +00002784 } else {
2785 DCHECK(in.GetConstant()->IsIntConstant());
2786 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002787 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
Roland Levillain01a8d712014-11-14 16:27:39 +00002788 }
2789 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002790 case DataType::Type::kInt64:
2791 if (in.IsRegisterPair()) {
2792 __ movsxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2793 } else if (in.IsDoubleStackSlot()) {
2794 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2795 } else {
2796 DCHECK(in.GetConstant()->IsLongConstant());
2797 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2798 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
2799 }
2800 break;
Roland Levillain01a8d712014-11-14 16:27:39 +00002801
2802 default:
2803 LOG(FATAL) << "Unexpected type conversion from " << input_type
2804 << " to " << result_type;
2805 }
2806 break;
2807
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002808 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002809 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002810 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002811 if (in.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002812 __ movl(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
Roland Levillain946e1432014-11-11 17:35:19 +00002813 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002814 __ movl(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain946e1432014-11-11 17:35:19 +00002815 } else {
2816 DCHECK(in.IsConstant());
2817 DCHECK(in.GetConstant()->IsLongConstant());
2818 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002819 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002820 }
2821 break;
2822
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002823 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002824 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2825 Register output = out.AsRegister<Register>();
2826 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002827 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002828
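          // Java semantics: NaN converts to 0 and values >= 2^31 saturate to
          // Integer.MAX_VALUE, whereas cvttss2si produces 0x80000000 for NaN
          // and out-of-range inputs. Hence the explicit clamp and NaN check
          // below; negative overflow already yields 0x80000000, the expected
          // Integer.MIN_VALUE.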
2829 __ movl(output, Immediate(kPrimIntMax));
2830 // temp = int-to-float(output)
2831 __ cvtsi2ss(temp, output);
2832 // if input >= temp goto done
2833 __ comiss(input, temp);
2834 __ j(kAboveEqual, &done);
2835 // if input == NaN goto nan
2836 __ j(kUnordered, &nan);
2837 // output = float-to-int-truncate(input)
2838 __ cvttss2si(output, input);
2839 __ jmp(&done);
2840 __ Bind(&nan);
2841 // output = 0
2842 __ xorl(output, output);
2843 __ Bind(&done);
2844 break;
2845 }
2846
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002847 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002848 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2849 Register output = out.AsRegister<Register>();
2850 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002851 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002852
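          // Same NaN and overflow handling as the float-to-int conversion
          // above, using the double-precision instructions (comisd/cvttsd2si).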
2853 __ movl(output, Immediate(kPrimIntMax));
2854 // temp = int-to-double(output)
2855 __ cvtsi2sd(temp, output);
2856 // if input >= temp goto done
2857 __ comisd(input, temp);
2858 __ j(kAboveEqual, &done);
2859 // if input == NaN goto nan
2860 __ j(kUnordered, &nan);
2861 // output = double-to-int-truncate(input)
2862 __ cvttsd2si(output, input);
2863 __ jmp(&done);
2864 __ Bind(&nan);
2865 // output = 0
2866 __ xorl(output, output);
2867 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002868 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002869 }
Roland Levillain946e1432014-11-11 17:35:19 +00002870
2871 default:
2872 LOG(FATAL) << "Unexpected type conversion from " << input_type
2873 << " to " << result_type;
2874 }
2875 break;
2876
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002877 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002878 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002879 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002880 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002881 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002882 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002883 case DataType::Type::kInt16:
2884 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002885 DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
2886 DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002887 DCHECK_EQ(in.AsRegister<Register>(), EAX);
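          // cdq sign-extends EAX into EDX, producing the 64-bit result in the
          // EDX:EAX pair required by the output location.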
Roland Levillaindff1f282014-11-05 14:15:05 +00002888 __ cdq();
2889 break;
2890
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002891 case DataType::Type::kFloat32:
Serban Constantinescuba45db02016-07-12 22:53:02 +01002892 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002893 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
Roland Levillain624279f2014-12-04 11:54:28 +00002894 break;
2895
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002896 case DataType::Type::kFloat64:
Serban Constantinescuba45db02016-07-12 22:53:02 +01002897 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002898 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
Roland Levillaindff1f282014-11-05 14:15:05 +00002899 break;
2900
2901 default:
2902 LOG(FATAL) << "Unexpected type conversion from " << input_type
2903 << " to " << result_type;
2904 }
2905 break;
2906
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002907 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002908 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002909 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002910 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002911 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002912 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002913 case DataType::Type::kInt16:
2914 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002915 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002916 break;
2917
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002918 case DataType::Type::kInt64: {
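        // 32-bit SSE has no int64-to-float conversion, so the value goes
        // through the x87 FP stack: PushOntoFPStack is expected to emit an
        // integer load (fild) and fstps then stores the single-precision
        // result.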
Roland Levillain232ade02015-04-20 15:14:36 +01002919 size_t adjustment = 0;
Roland Levillain6d0e4832014-11-27 18:31:21 +00002920
Roland Levillain232ade02015-04-20 15:14:36 +01002921 // Create stack space for the call to
2922 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstps below.
2923 // TODO: enhance register allocator to ask for stack temporaries.
2924 if (!in.IsDoubleStackSlot() || !out.IsStackSlot()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002925 adjustment = DataType::Size(DataType::Type::kInt64);
Roland Levillain232ade02015-04-20 15:14:36 +01002926 __ subl(ESP, Immediate(adjustment));
2927 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002928
Roland Levillain232ade02015-04-20 15:14:36 +01002929 // Load the value to the FP stack, using temporaries if needed.
2930 PushOntoFPStack(in, 0, adjustment, false, true);
2931
2932 if (out.IsStackSlot()) {
2933 __ fstps(Address(ESP, out.GetStackIndex() + adjustment));
2934 } else {
2935 __ fstps(Address(ESP, 0));
2936 Location stack_temp = Location::StackSlot(0);
2937 codegen_->Move32(out, stack_temp);
2938 }
2939
2940 // Remove the temporary stack space we allocated.
2941 if (adjustment != 0) {
2942 __ addl(ESP, Immediate(adjustment));
2943 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002944 break;
2945 }
2946
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002947 case DataType::Type::kFloat64:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002948 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002949 break;
2950
2951 default:
2952 LOG(FATAL) << "Unexpected type conversion from " << input_type
2953 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002954 }
Roland Levillaincff13742014-11-17 14:32:17 +00002955 break;
2956
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002957 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002958 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002959 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002960 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002961 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002962 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002963 case DataType::Type::kInt16:
2964 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002965 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002966 break;
2967
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002968 case DataType::Type::kInt64: {
Roland Levillain232ade02015-04-20 15:14:36 +01002969 size_t adjustment = 0;
Roland Levillain647b9ed2014-11-27 12:06:00 +00002970
Roland Levillain232ade02015-04-20 15:14:36 +01002971 // Create stack space for the call to
2972 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstpl below.
2973 // TODO: enhance register allocator to ask for stack temporaries.
2974 if (!in.IsDoubleStackSlot() || !out.IsDoubleStackSlot()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002975 adjustment = DataType::Size(DataType::Type::kInt64);
Roland Levillain232ade02015-04-20 15:14:36 +01002976 __ subl(ESP, Immediate(adjustment));
2977 }
2978
2979 // Load the value to the FP stack, using temporaries if needed.
2980 PushOntoFPStack(in, 0, adjustment, false, true);
2981
2982 if (out.IsDoubleStackSlot()) {
2983 __ fstpl(Address(ESP, out.GetStackIndex() + adjustment));
2984 } else {
2985 __ fstpl(Address(ESP, 0));
2986 Location stack_temp = Location::DoubleStackSlot(0);
2987 codegen_->Move64(out, stack_temp);
2988 }
2989
2990 // Remove the temporary stack space we allocated.
2991 if (adjustment != 0) {
2992 __ addl(ESP, Immediate(adjustment));
2993 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002994 break;
2995 }
2996
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002997 case DataType::Type::kFloat32:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002998 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002999 break;
3000
3001 default:
3002 LOG(FATAL) << "Unexpected type conversion from " << input_type
3003 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003004 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003005 break;
3006
3007 default:
3008 LOG(FATAL) << "Unexpected type conversion from " << input_type
3009 << " to " << result_type;
3010 }
3011}
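// A short note on the kInt64 -> kFloat32/kFloat64 cases above: 32-bit SSE has no
// instruction that converts a 64-bit integer directly (cvtsi2ss/cvtsi2sd only accept a
// 32-bit GPR), so the value is routed through the x87 FPU instead: fildl loads the 64-bit
// integer onto the FP stack and fstps/fstpl stores it back rounded to the requested width.
// A minimal C++ sketch of the effect (illustrative only, <cstdint> assumed; these helpers
// are not part of the code generator):
//
//   static inline float Int64ToFloat(int64_t v) { return static_cast<float>(v); }
//   static inline double Int64ToDouble(int64_t v) { return static_cast<double>(v); }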
3012
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003013void LocationsBuilderX86::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003014 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003015 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003016 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003017 case DataType::Type::kInt32: {
Mark Mendell09b84632015-02-13 17:48:38 -05003018 locations->SetInAt(0, Location::RequiresRegister());
3019 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3020 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3021 break;
3022 }
3023
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003024 case DataType::Type::kInt64: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003025 locations->SetInAt(0, Location::RequiresRegister());
3026 locations->SetInAt(1, Location::Any());
3027 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003028 break;
3029 }
3030
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003031 case DataType::Type::kFloat32:
3032 case DataType::Type::kFloat64: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003033 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003034 if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3035 DCHECK(add->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003036 } else if (add->InputAt(1)->IsConstant()) {
3037 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003038 } else {
3039 locations->SetInAt(1, Location::Any());
3040 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003041 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003042 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003043 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003044
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003045 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003046 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Elliott Hughesc1896c92018-11-29 11:33:18 -08003047 UNREACHABLE();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003048 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003049}
3050
3051void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
3052 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003053 Location first = locations->InAt(0);
3054 Location second = locations->InAt(1);
Mark Mendell09b84632015-02-13 17:48:38 -05003055 Location out = locations->Out();
3056
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003057 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003058 case DataType::Type::kInt32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003059 if (second.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05003060 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3061 __ addl(out.AsRegister<Register>(), second.AsRegister<Register>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003062 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3063 __ addl(out.AsRegister<Register>(), first.AsRegister<Register>());
Mark Mendell09b84632015-02-13 17:48:38 -05003064 } else {
3065 __ leal(out.AsRegister<Register>(), Address(
3066 first.AsRegister<Register>(), second.AsRegister<Register>(), TIMES_1, 0));
3067 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003068 } else if (second.IsConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05003069 int32_t value = second.GetConstant()->AsIntConstant()->GetValue();
3070 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3071 __ addl(out.AsRegister<Register>(), Immediate(value));
3072 } else {
3073 __ leal(out.AsRegister<Register>(), Address(first.AsRegister<Register>(), value));
3074 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003075 } else {
Mark Mendell09b84632015-02-13 17:48:38 -05003076 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003077 __ addl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003078 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003079 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003080 }
3081
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003082 case DataType::Type::kInt64: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003083 if (second.IsRegisterPair()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003084 __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3085 __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003086 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003087 __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3088 __ adcl(first.AsRegisterPairHigh<Register>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003089 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003090 } else {
3091 DCHECK(second.IsConstant()) << second;
3092 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3093 __ addl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3094 __ adcl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003095 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003096 break;
3097 }
3098
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003099 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003100 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003101 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003102 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3103 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003104 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003105 __ addss(first.AsFpuRegister<XmmRegister>(),
3106 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003107 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3108 const_area->GetBaseMethodAddress(),
3109 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003110 } else {
3111 DCHECK(second.IsStackSlot());
3112 __ addss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003113 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003114 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003115 }
3116
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003117 case DataType::Type::kFloat64: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003118 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003119 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003120 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3121 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003122 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003123 __ addsd(first.AsFpuRegister<XmmRegister>(),
3124 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003125 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3126 const_area->GetBaseMethodAddress(),
3127 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003128 } else {
3129 DCHECK(second.IsDoubleStackSlot());
3130 __ addsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003131 }
3132 break;
3133 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003134
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003135 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003136 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003137 }
3138}
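// Note on the kInt32 case above: when the register allocator picks an output register that
// differs from both inputs, leal acts as a three-operand add (out = first + second, or
// out = first + constant) without clobbering either input; this is what the
// Location::kNoOutputOverlap choice in LocationsBuilderX86::VisitAdd makes possible.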
3139
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003140void LocationsBuilderX86::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003141 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003142 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003143 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003144 case DataType::Type::kInt32:
3145 case DataType::Type::kInt64: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003146 locations->SetInAt(0, Location::RequiresRegister());
3147 locations->SetInAt(1, Location::Any());
3148 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003149 break;
3150 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003151 case DataType::Type::kFloat32:
3152 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003153 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003154 if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3155 DCHECK(sub->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003156 } else if (sub->InputAt(1)->IsConstant()) {
3157 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003158 } else {
3159 locations->SetInAt(1, Location::Any());
3160 }
Calin Juravle11351682014-10-23 15:38:15 +01003161 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003162 break;
Calin Juravle11351682014-10-23 15:38:15 +01003163 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003164
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003165 default:
Calin Juravle11351682014-10-23 15:38:15 +01003166 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003167 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003168}
3169
3170void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
3171 LocationSummary* locations = sub->GetLocations();
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003172 Location first = locations->InAt(0);
3173 Location second = locations->InAt(1);
Calin Juravle11351682014-10-23 15:38:15 +01003174 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003175 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003176 case DataType::Type::kInt32: {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003177 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003178 __ subl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003179 } else if (second.IsConstant()) {
Roland Levillain199f3362014-11-27 17:15:16 +00003180 __ subl(first.AsRegister<Register>(),
3181 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003182 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003183 __ subl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003184 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003185 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003186 }
3187
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003188 case DataType::Type::kInt64: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003189 if (second.IsRegisterPair()) {
Calin Juravle11351682014-10-23 15:38:15 +01003190 __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3191 __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003192 } else if (second.IsDoubleStackSlot()) {
Calin Juravle11351682014-10-23 15:38:15 +01003193 __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003194 __ sbbl(first.AsRegisterPairHigh<Register>(),
3195 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003196 } else {
3197 DCHECK(second.IsConstant()) << second;
3198 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3199 __ subl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3200 __ sbbl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003201 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003202 break;
3203 }
3204
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003205 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003206 if (second.IsFpuRegister()) {
3207 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3208 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3209 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003210 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003211 __ subss(first.AsFpuRegister<XmmRegister>(),
3212 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003213 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3214 const_area->GetBaseMethodAddress(),
3215 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003216 } else {
3217 DCHECK(second.IsStackSlot());
3218 __ subss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3219 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003220 break;
Calin Juravle11351682014-10-23 15:38:15 +01003221 }
3222
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003223 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003224 if (second.IsFpuRegister()) {
3225 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3226 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3227 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003228 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003229 __ subsd(first.AsFpuRegister<XmmRegister>(),
3230 codegen_->LiteralDoubleAddress(
3231 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003232 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003233 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3234 } else {
3235 DCHECK(second.IsDoubleStackSlot());
3236 __ subsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3237 }
Calin Juravle11351682014-10-23 15:38:15 +01003238 break;
3239 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003240
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003241 default:
Calin Juravle11351682014-10-23 15:38:15 +01003242 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003243 }
3244}
3245
Calin Juravle34bacdf2014-10-07 20:23:36 +01003246void LocationsBuilderX86::VisitMul(HMul* mul) {
3247 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003248 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003249 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003250 case DataType::Type::kInt32:
Calin Juravle34bacdf2014-10-07 20:23:36 +01003251 locations->SetInAt(0, Location::RequiresRegister());
3252 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003253 if (mul->InputAt(1)->IsIntConstant()) {
3254 // Can use 3 operand multiply.
3255 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3256 } else {
3257 locations->SetOut(Location::SameAsFirstInput());
3258 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003259 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003260 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003261 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003262 locations->SetInAt(1, Location::Any());
3263 locations->SetOut(Location::SameAsFirstInput());
3264 // Needed for imul on 32bits with 64bits output.
3265 locations->AddTemp(Location::RegisterLocation(EAX));
3266 locations->AddTemp(Location::RegisterLocation(EDX));
3267 break;
3268 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003269 case DataType::Type::kFloat32:
3270 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003271 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003272 if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3273 DCHECK(mul->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003274 } else if (mul->InputAt(1)->IsConstant()) {
3275 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003276 } else {
3277 locations->SetInAt(1, Location::Any());
3278 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003279 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003280 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003281 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003282
3283 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003284 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003285 }
3286}
3287
3288void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
3289 LocationSummary* locations = mul->GetLocations();
3290 Location first = locations->InAt(0);
3291 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003292 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003293
3294 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003295 case DataType::Type::kInt32:
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003296 // The constant may have ended up in a register, so test explicitly to avoid
3297 // problems where the output may not be the same as the first operand.
3298 if (mul->InputAt(1)->IsIntConstant()) {
3299 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3300 __ imull(out.AsRegister<Register>(), first.AsRegister<Register>(), imm);
3301 } else if (second.IsRegister()) {
3302 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003303 __ imull(first.AsRegister<Register>(), second.AsRegister<Register>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003304 } else {
3305 DCHECK(second.IsStackSlot());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003306 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003307 __ imull(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003308 }
3309 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003310
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003311 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003312 Register in1_hi = first.AsRegisterPairHigh<Register>();
3313 Register in1_lo = first.AsRegisterPairLow<Register>();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003314 Register eax = locations->GetTemp(0).AsRegister<Register>();
3315 Register edx = locations->GetTemp(1).AsRegister<Register>();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003316
3317 DCHECK_EQ(EAX, eax);
3318 DCHECK_EQ(EDX, edx);
3319
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003320 // input: in1 - 64 bits, in2 - 64 bits.
Calin Juravle34bacdf2014-10-07 20:23:36 +01003321 // output: in1
3322        // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
3323 // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
3324 // parts: in1.lo = (in1.lo * in2.lo)[31:0]
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003325 if (second.IsConstant()) {
3326 DCHECK(second.GetConstant()->IsLongConstant());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003327
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003328 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3329 int32_t low_value = Low32Bits(value);
3330 int32_t high_value = High32Bits(value);
3331 Immediate low(low_value);
3332 Immediate high(high_value);
3333
3334 __ movl(eax, high);
3335 // eax <- in1.lo * in2.hi
3336 __ imull(eax, in1_lo);
3337 // in1.hi <- in1.hi * in2.lo
3338 __ imull(in1_hi, low);
3339 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3340 __ addl(in1_hi, eax);
3341        // move in2_lo to eax to prepare for the double-width multiply (mull)
3342 __ movl(eax, low);
3343 // edx:eax <- in1.lo * in2.lo
3344 __ mull(in1_lo);
3345 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3346 __ addl(in1_hi, edx);
3347 // in1.lo <- (in1.lo * in2.lo)[31:0];
3348 __ movl(in1_lo, eax);
3349 } else if (second.IsRegisterPair()) {
3350 Register in2_hi = second.AsRegisterPairHigh<Register>();
3351 Register in2_lo = second.AsRegisterPairLow<Register>();
3352
3353 __ movl(eax, in2_hi);
3354 // eax <- in1.lo * in2.hi
3355 __ imull(eax, in1_lo);
3356 // in1.hi <- in1.hi * in2.lo
3357 __ imull(in1_hi, in2_lo);
3358 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3359 __ addl(in1_hi, eax);
3360        // move in1_lo to eax to prepare for the double-width multiply (mull)
3361 __ movl(eax, in1_lo);
3362 // edx:eax <- in1.lo * in2.lo
3363 __ mull(in2_lo);
3364 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3365 __ addl(in1_hi, edx);
3366 // in1.lo <- (in1.lo * in2.lo)[31:0];
3367 __ movl(in1_lo, eax);
3368 } else {
3369 DCHECK(second.IsDoubleStackSlot()) << second;
3370 Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
3371 Address in2_lo(ESP, second.GetStackIndex());
3372
3373 __ movl(eax, in2_hi);
3374 // eax <- in1.lo * in2.hi
3375 __ imull(eax, in1_lo);
3376 // in1.hi <- in1.hi * in2.lo
3377 __ imull(in1_hi, in2_lo);
3378 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3379 __ addl(in1_hi, eax);
3380        // move in1_lo to eax to prepare for the double-width multiply (mull)
3381 __ movl(eax, in1_lo);
3382 // edx:eax <- in1.lo * in2.lo
3383 __ mull(in2_lo);
3384 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3385 __ addl(in1_hi, edx);
3386 // in1.lo <- (in1.lo * in2.lo)[31:0];
3387 __ movl(in1_lo, eax);
3388 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003389
3390 break;
3391 }
3392
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003393 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003394 DCHECK(first.Equals(locations->Out()));
3395 if (second.IsFpuRegister()) {
3396 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3397 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3398 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003399 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003400 __ mulss(first.AsFpuRegister<XmmRegister>(),
3401 codegen_->LiteralFloatAddress(
3402 const_area->GetConstant()->AsFloatConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003403 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003404 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3405 } else {
3406 DCHECK(second.IsStackSlot());
3407 __ mulss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3408 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003409 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003410 }
3411
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003412 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003413 DCHECK(first.Equals(locations->Out()));
3414 if (second.IsFpuRegister()) {
3415 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3416 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3417 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003418 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003419 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3420 codegen_->LiteralDoubleAddress(
3421 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003422 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003423 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3424 } else {
3425 DCHECK(second.IsDoubleStackSlot());
3426 __ mulsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3427 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003428 break;
3429 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003430
3431 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003432 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003433 }
3434}
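// A minimal C++ sketch of the 32-bit decomposition used by the kInt64 case above
// (illustrative only; the helper name is made up and types come from <cstdint>):
//
//   static inline uint64_t Mul64Via32(uint64_t a, uint64_t b) {
//     uint32_t a_lo = static_cast<uint32_t>(a), a_hi = static_cast<uint32_t>(a >> 32);
//     uint32_t b_lo = static_cast<uint32_t>(b), b_hi = static_cast<uint32_t>(b >> 32);
//     uint64_t lo_product = static_cast<uint64_t>(a_lo) * b_lo;             // mull
//     uint32_t hi = a_lo * b_hi + a_hi * b_lo                               // imull + imull + addl
//                 + static_cast<uint32_t>(lo_product >> 32);                // addl of EDX
//     return (static_cast<uint64_t>(hi) << 32) | static_cast<uint32_t>(lo_product);
//   }
//
// Signed and unsigned multiplication agree on the low 64 bits of the product, which is why
// the same sequence serves HMul regardless of operand signs.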
3435
Roland Levillain232ade02015-04-20 15:14:36 +01003436void InstructionCodeGeneratorX86::PushOntoFPStack(Location source,
3437 uint32_t temp_offset,
3438 uint32_t stack_adjustment,
3439 bool is_fp,
3440 bool is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003441 if (source.IsStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003442 DCHECK(!is_wide);
3443 if (is_fp) {
3444 __ flds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3445 } else {
3446 __ filds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3447 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003448 } else if (source.IsDoubleStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003449 DCHECK(is_wide);
3450 if (is_fp) {
3451 __ fldl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3452 } else {
3453 __ fildl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3454 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003455 } else {
3456 // Write the value to the temporary location on the stack and load to FP stack.
Roland Levillain232ade02015-04-20 15:14:36 +01003457 if (!is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003458 Location stack_temp = Location::StackSlot(temp_offset);
3459 codegen_->Move32(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003460 if (is_fp) {
3461 __ flds(Address(ESP, temp_offset));
3462 } else {
3463 __ filds(Address(ESP, temp_offset));
3464 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003465 } else {
3466 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3467 codegen_->Move64(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003468 if (is_fp) {
3469 __ fldl(Address(ESP, temp_offset));
3470 } else {
3471 __ fildl(Address(ESP, temp_offset));
3472 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003473 }
3474 }
3475}
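// Quick reference for the helper above (a summary, not normative): 'source' may be a stack
// slot, a double stack slot, or anything else (e.g. a register pair), in which case it is
// first spilled to the temporary slot at 'temp_offset'. 'is_fp' chooses flds/fldl over
// filds/fildl (i.e. whether the bits are already floating point or must be converted from an
// integer), 'is_wide' chooses the 64-bit memory operand, and 'stack_adjustment' compensates
// for extra space the caller has already pushed below the original stack slots. GenerateRemFP
// below and the kInt64 -> FP conversions above are typical call sites.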
3476
3477void InstructionCodeGeneratorX86::GenerateRemFP(HRem *rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003478 DataType::Type type = rem->GetResultType();
3479 bool is_float = type == DataType::Type::kFloat32;
3480 size_t elem_size = DataType::Size(type);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003481 LocationSummary* locations = rem->GetLocations();
3482 Location first = locations->InAt(0);
3483 Location second = locations->InAt(1);
3484 Location out = locations->Out();
3485
3486 // Create stack space for 2 elements.
3487 // TODO: enhance register allocator to ask for stack temporaries.
3488 __ subl(ESP, Immediate(2 * elem_size));
3489
3490 // Load the values to the FP stack in reverse order, using temporaries if needed.
Roland Levillain232ade02015-04-20 15:14:36 +01003491 const bool is_wide = !is_float;
Andreas Gampe3db70682018-12-26 15:12:03 -08003492 PushOntoFPStack(second, elem_size, 2 * elem_size, /* is_fp= */ true, is_wide);
3493 PushOntoFPStack(first, 0, 2 * elem_size, /* is_fp= */ true, is_wide);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003494
3495 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003496 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003497 __ Bind(&retry);
3498 __ fprem();
3499
3500 // Move FP status to AX.
3501 __ fstsw();
3502
3503 // And see if the argument reduction is complete. This is signaled by the
3504 // C2 FPU flag bit set to 0.
3505 __ andl(EAX, Immediate(kC2ConditionMask));
3506 __ j(kNotEqual, &retry);
3507
3508 // We have settled on the final value. Retrieve it into an XMM register.
3509 // Store FP top of stack to real stack.
3510 if (is_float) {
3511 __ fsts(Address(ESP, 0));
3512 } else {
3513 __ fstl(Address(ESP, 0));
3514 }
3515
3516 // Pop the 2 items from the FP stack.
3517 __ fucompp();
3518
3519 // Load the value from the stack into an XMM register.
3520 DCHECK(out.IsFpuRegister()) << out;
3521 if (is_float) {
3522 __ movss(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3523 } else {
3524 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3525 }
3526
3527 // And remove the temporary stack space we allocated.
3528 __ addl(ESP, Immediate(2 * elem_size));
3529}
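// A rough C++ equivalent of the FPREM loop above (illustrative only; <cmath> assumed, the
// generated code itself never calls into libm):
//
//   static inline double RemFP(double dividend, double divisor) {
//     return std::fmod(dividend, divisor);  // FPREM computes the same truncated remainder
//   }
//
// The retry loop exists because FPREM performs only a partial reduction when the operand
// exponents are far apart, reporting "incomplete" through the C2 status flag checked above.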
3530
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003531
3532void InstructionCodeGeneratorX86::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3533 DCHECK(instruction->IsDiv() || instruction->IsRem());
3534
3535 LocationSummary* locations = instruction->GetLocations();
3536 DCHECK(locations->InAt(1).IsConstant());
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003537 DCHECK(locations->InAt(1).GetConstant()->IsIntConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003538
3539 Register out_register = locations->Out().AsRegister<Register>();
3540 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003541 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003542
3543 DCHECK(imm == 1 || imm == -1);
3544
3545 if (instruction->IsRem()) {
3546 __ xorl(out_register, out_register);
3547 } else {
3548 __ movl(out_register, input_register);
3549 if (imm == -1) {
3550 __ negl(out_register);
3551 }
3552 }
3553}
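// What the fast path above computes, in plain C++ terms (illustrative only; names made up):
//
//   static inline int32_t RemByOneOrMinusOne(int32_t /* n */) { return 0; }  // xorl out, out
//   static inline int32_t DivByMinusOne(int32_t n) {
//     // negl performs plain two's-complement negation, so 0x80000000 maps to itself without
//     // trapping, unlike idivl with a divisor of -1 (see GenerateDivRemIntegral below).
//     return static_cast<int32_t>(0u - static_cast<uint32_t>(n));
//   }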
3554
Shalini Salomi Bodapatia66784b2018-11-06 13:05:44 +05303555void InstructionCodeGeneratorX86::RemByPowerOfTwo(HRem* instruction) {
3556 LocationSummary* locations = instruction->GetLocations();
3557 Location second = locations->InAt(1);
3558
3559 Register out = locations->Out().AsRegister<Register>();
3560 Register numerator = locations->InAt(0).AsRegister<Register>();
3561
3562 int32_t imm = Int64FromConstant(second.GetConstant());
3563 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3564 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
3565
3566 Register tmp = locations->GetTemp(0).AsRegister<Register>();
3567 NearLabel done;
3568 __ movl(out, numerator);
3569  __ andl(out, Immediate(abs_imm - 1));
3570 __ j(Condition::kZero, &done);
3571  __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm - 1))));
3572 __ testl(numerator, numerator);
3573 __ cmovl(Condition::kLess, out, tmp);
3574 __ Bind(&done);
3575}
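// A minimal C++ sketch of the mask-and-fixup above (illustrative only; abs_imm is a power
// of two, as the DCHECK guarantees):
//
//   static inline int32_t RemByPowerOfTwo(int32_t n, uint32_t abs_imm) {
//     int32_t r = n & static_cast<int32_t>(abs_imm - 1);   // andl
//     if (r != 0 && n < 0) {                                // j(kZero) skips the fixup
//       r -= static_cast<int32_t>(abs_imm);                 // leal + testl + cmovl(kLess)
//     }
//     return r;  // truncated remainder: same sign as the numerator, like idivl
//   }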
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003576
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003577void InstructionCodeGeneratorX86::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003578 LocationSummary* locations = instruction->GetLocations();
3579
3580 Register out_register = locations->Out().AsRegister<Register>();
3581 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003582 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003583 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3584 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003585
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003586 Register num = locations->GetTemp(0).AsRegister<Register>();
3587
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003588 __ leal(num, Address(input_register, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003589 __ testl(input_register, input_register);
3590 __ cmovl(kGreaterEqual, num, input_register);
3591 int shift = CTZ(imm);
3592 __ sarl(num, Immediate(shift));
3593
3594 if (imm < 0) {
3595 __ negl(num);
3596 }
3597
3598 __ movl(out_register, num);
3599}
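// A minimal C++ sketch of the biased arithmetic shift above (illustrative only; AbsOrMin
// and CTZ are the same helpers the code itself uses):
//
//   static inline int32_t DivByPowerOfTwo(int32_t n, int32_t imm) {
//     uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
//     // Add (abs_imm - 1) to negative inputs so the arithmetic shift rounds toward zero
//     // rather than toward negative infinity (leal + testl + cmovl above).
//     int32_t biased = (n >= 0)
//         ? n
//         : static_cast<int32_t>(static_cast<uint32_t>(n) + abs_imm - 1);
//     int32_t q = biased >> CTZ(abs_imm);                   // sarl
//     return imm < 0 ? -q : q;                              // negl when the divisor is negative
//   }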
3600
3601void InstructionCodeGeneratorX86::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3602 DCHECK(instruction->IsDiv() || instruction->IsRem());
3603
3604 LocationSummary* locations = instruction->GetLocations();
3605 int imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
3606
3607 Register eax = locations->InAt(0).AsRegister<Register>();
3608 Register out = locations->Out().AsRegister<Register>();
3609 Register num;
3610 Register edx;
3611
3612 if (instruction->IsDiv()) {
3613 edx = locations->GetTemp(0).AsRegister<Register>();
3614 num = locations->GetTemp(1).AsRegister<Register>();
3615 } else {
3616 edx = locations->Out().AsRegister<Register>();
3617 num = locations->GetTemp(0).AsRegister<Register>();
3618 }
3619
3620 DCHECK_EQ(EAX, eax);
3621 DCHECK_EQ(EDX, edx);
3622 if (instruction->IsDiv()) {
3623 DCHECK_EQ(EAX, out);
3624 } else {
3625 DCHECK_EQ(EDX, out);
3626 }
3627
3628 int64_t magic;
3629 int shift;
Andreas Gampe3db70682018-12-26 15:12:03 -08003630 CalculateMagicAndShiftForDivRem(imm, /* is_long= */ false, &magic, &shift);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003631
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003632 // Save the numerator.
3633 __ movl(num, eax);
3634
3635 // EAX = magic
3636 __ movl(eax, Immediate(magic));
3637
3638 // EDX:EAX = magic * numerator
3639 __ imull(num);
3640
3641 if (imm > 0 && magic < 0) {
3642 // EDX += num
3643 __ addl(edx, num);
3644 } else if (imm < 0 && magic > 0) {
3645 __ subl(edx, num);
3646 }
3647
3648 // Shift if needed.
3649 if (shift != 0) {
3650 __ sarl(edx, Immediate(shift));
3651 }
3652
3653 // EDX += 1 if EDX < 0
3654 __ movl(eax, edx);
3655 __ shrl(edx, Immediate(31));
3656 __ addl(edx, eax);
3657
3658 if (instruction->IsRem()) {
3659 __ movl(eax, num);
3660 __ imull(edx, Immediate(imm));
3661 __ subl(eax, edx);
3662 __ movl(edx, eax);
3663 } else {
3664 __ movl(eax, edx);
3665 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003666}
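// A minimal C++ sketch of the magic-number sequence above (illustrative only; 'magic' and
// 'shift' are whatever CalculateMagicAndShiftForDivRem produced for 'imm'):
//
//   static inline int32_t DivByConstant(int32_t n, int32_t imm, int32_t magic, int shift) {
//     int64_t product = static_cast<int64_t>(magic) * n;            // one-operand imull
//     int32_t hi = static_cast<int32_t>(product >> 32);             // EDX
//     if (imm > 0 && magic < 0) hi += n;                            // addl
//     if (imm < 0 && magic > 0) hi -= n;                            // subl
//     hi >>= shift;                                                 // sarl
//     hi += static_cast<int32_t>(static_cast<uint32_t>(hi) >> 31);  // round toward zero
//     return hi;  // for HRem the code then computes n - hi * imm
//   }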
3667
Calin Juravlebacfec32014-11-14 15:54:36 +00003668void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3669 DCHECK(instruction->IsDiv() || instruction->IsRem());
3670
3671 LocationSummary* locations = instruction->GetLocations();
3672 Location out = locations->Out();
3673 Location first = locations->InAt(0);
3674 Location second = locations->InAt(1);
3675 bool is_div = instruction->IsDiv();
3676
3677 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003678 case DataType::Type::kInt32: {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003679 DCHECK_EQ(EAX, first.AsRegister<Register>());
3680 DCHECK_EQ(is_div ? EAX : EDX, out.AsRegister<Register>());
Calin Juravlebacfec32014-11-14 15:54:36 +00003681
Vladimir Marko13c86fd2015-11-11 12:37:46 +00003682 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003683 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003684
3685 if (imm == 0) {
3686 // Do not generate anything for 0. DivZeroCheck would forbid any generated code.
3687 } else if (imm == 1 || imm == -1) {
3688 DivRemOneOrMinusOne(instruction);
Shalini Salomi Bodapatia66784b2018-11-06 13:05:44 +05303689 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
3690 if (is_div) {
3691 DivByPowerOfTwo(instruction->AsDiv());
3692 } else {
3693 RemByPowerOfTwo(instruction->AsRem());
3694 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003695 } else {
3696 DCHECK(imm <= -2 || imm >= 2);
3697 GenerateDivRemWithAnyConstant(instruction);
3698 }
3699 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01003700 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86(
David Srbecky9cd6d372016-02-09 15:24:47 +00003701 instruction, out.AsRegister<Register>(), is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003702 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003703
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003704 Register second_reg = second.AsRegister<Register>();
3705 // 0x80000000/-1 triggers an arithmetic exception!
3706        // Dividing by -1 is actually negation and -0x80000000 = 0x80000000 so
3707 // it's safe to just use negl instead of more complex comparisons.
Calin Juravlebacfec32014-11-14 15:54:36 +00003708
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003709 __ cmpl(second_reg, Immediate(-1));
3710 __ j(kEqual, slow_path->GetEntryLabel());
Calin Juravlebacfec32014-11-14 15:54:36 +00003711
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003712        // edx:eax <- sign extension of eax
3713 __ cdq();
3714 // eax = quotient, edx = remainder
3715 __ idivl(second_reg);
3716 __ Bind(slow_path->GetExitLabel());
3717 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003718 break;
3719 }
3720
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003721 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003722 InvokeRuntimeCallingConvention calling_convention;
3723 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
3724 DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
3725 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
3726 DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
3727 DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
3728 DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());
3729
3730 if (is_div) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003731 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003732 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003733 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003734 codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003735 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003736 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003737 break;
3738 }
3739
3740 default:
3741 LOG(FATAL) << "Unexpected type for GenerateDivRemIntegral " << instruction->GetResultType();
3742 }
3743}
3744
Calin Juravle7c4954d2014-10-28 16:57:40 +00003745void LocationsBuilderX86::VisitDiv(HDiv* div) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003746 LocationSummary::CallKind call_kind = (div->GetResultType() == DataType::Type::kInt64)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003747 ? LocationSummary::kCallOnMainOnly
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003748 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01003749 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(div, call_kind);
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003750
Calin Juravle7c4954d2014-10-28 16:57:40 +00003751 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003752 case DataType::Type::kInt32: {
Calin Juravled0d48522014-11-04 16:40:20 +00003753 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003754 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003755 locations->SetOut(Location::SameAsFirstInput());
3756 // Intel uses edx:eax as the dividend.
3757 locations->AddTemp(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003758      // We need to save the numerator while we tweak eax and edx. Since the one-operand imul
3759      // forces its result into EDX:EAX, things are simpler if we also use EAX as the output
3760      // and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003761 if (div->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003762 locations->AddTemp(Location::RequiresRegister());
3763 }
Calin Juravled0d48522014-11-04 16:40:20 +00003764 break;
3765 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003766 case DataType::Type::kInt64: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003767 InvokeRuntimeCallingConvention calling_convention;
3768 locations->SetInAt(0, Location::RegisterPairLocation(
3769 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3770 locations->SetInAt(1, Location::RegisterPairLocation(
3771 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3772 // Runtime helper puts the result in EAX, EDX.
3773 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Calin Juravle7c4954d2014-10-28 16:57:40 +00003774 break;
3775 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003776 case DataType::Type::kFloat32:
3777 case DataType::Type::kFloat64: {
Calin Juravle7c4954d2014-10-28 16:57:40 +00003778 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003779 if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3780 DCHECK(div->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003781 } else if (div->InputAt(1)->IsConstant()) {
3782 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003783 } else {
3784 locations->SetInAt(1, Location::Any());
3785 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003786 locations->SetOut(Location::SameAsFirstInput());
3787 break;
3788 }
3789
3790 default:
3791 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3792 }
3793}
3794
3795void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
3796 LocationSummary* locations = div->GetLocations();
3797 Location first = locations->InAt(0);
3798 Location second = locations->InAt(1);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003799
3800 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003801 case DataType::Type::kInt32:
3802 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003803 GenerateDivRemIntegral(div);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003804 break;
3805 }
3806
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003807 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003808 if (second.IsFpuRegister()) {
3809 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3810 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3811 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003812 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003813 __ divss(first.AsFpuRegister<XmmRegister>(),
3814 codegen_->LiteralFloatAddress(
3815 const_area->GetConstant()->AsFloatConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003816 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003817 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3818 } else {
3819 DCHECK(second.IsStackSlot());
3820 __ divss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3821 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003822 break;
3823 }
3824
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003825 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003826 if (second.IsFpuRegister()) {
3827 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3828 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3829 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003830 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003831 __ divsd(first.AsFpuRegister<XmmRegister>(),
3832 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003833 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3834 const_area->GetBaseMethodAddress(),
3835 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003836 } else {
3837 DCHECK(second.IsDoubleStackSlot());
3838 __ divsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3839 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003840 break;
3841 }
3842
3843 default:
3844 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3845 }
3846}
3847
Calin Juravlebacfec32014-11-14 15:54:36 +00003848void LocationsBuilderX86::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003849 DataType::Type type = rem->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003850
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003851 LocationSummary::CallKind call_kind = (rem->GetResultType() == DataType::Type::kInt64)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003852 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003853 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01003854 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Calin Juravlebacfec32014-11-14 15:54:36 +00003855
Calin Juravled2ec87d2014-12-08 14:24:46 +00003856 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003857 case DataType::Type::kInt32: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003858 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003859 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003860 locations->SetOut(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003861      // We need to save the numerator while we tweak eax and edx. Since the one-operand imul
3862      // forces its result into EDX:EAX, things are simpler if we also use EDX as the output
3863      // and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003864 if (rem->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003865 locations->AddTemp(Location::RequiresRegister());
3866 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003867 break;
3868 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003869 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003870 InvokeRuntimeCallingConvention calling_convention;
3871 locations->SetInAt(0, Location::RegisterPairLocation(
3872 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3873 locations->SetInAt(1, Location::RegisterPairLocation(
3874 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3875 // Runtime helper puts the result in EAX, EDX.
3876 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
3877 break;
3878 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003879 case DataType::Type::kFloat64:
3880 case DataType::Type::kFloat32: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003881 locations->SetInAt(0, Location::Any());
3882 locations->SetInAt(1, Location::Any());
3883 locations->SetOut(Location::RequiresFpuRegister());
3884 locations->AddTemp(Location::RegisterLocation(EAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003885 break;
3886 }
3887
3888 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003889 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003890 }
3891}
3892
3893void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003894 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00003895 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003896 case DataType::Type::kInt32:
3897 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003898 GenerateDivRemIntegral(rem);
3899 break;
3900 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003901 case DataType::Type::kFloat32:
3902 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003903 GenerateRemFP(rem);
Calin Juravlebacfec32014-11-14 15:54:36 +00003904 break;
3905 }
3906 default:
3907 LOG(FATAL) << "Unexpected rem type " << type;
3908 }
3909}
3910
Aart Bik1f8d51b2018-02-15 10:42:37 -08003911static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
3912 LocationSummary* locations = new (allocator) LocationSummary(minmax);
3913 switch (minmax->GetResultType()) {
3914 case DataType::Type::kInt32:
3915 locations->SetInAt(0, Location::RequiresRegister());
3916 locations->SetInAt(1, Location::RequiresRegister());
3917 locations->SetOut(Location::SameAsFirstInput());
3918 break;
3919 case DataType::Type::kInt64:
3920 locations->SetInAt(0, Location::RequiresRegister());
3921 locations->SetInAt(1, Location::RequiresRegister());
3922 locations->SetOut(Location::SameAsFirstInput());
3923 // Register to use to perform a long subtract to set cc.
3924 locations->AddTemp(Location::RequiresRegister());
3925 break;
3926 case DataType::Type::kFloat32:
3927 locations->SetInAt(0, Location::RequiresFpuRegister());
3928 locations->SetInAt(1, Location::RequiresFpuRegister());
3929 locations->SetOut(Location::SameAsFirstInput());
3930 locations->AddTemp(Location::RequiresRegister());
3931 break;
3932 case DataType::Type::kFloat64:
3933 locations->SetInAt(0, Location::RequiresFpuRegister());
3934 locations->SetInAt(1, Location::RequiresFpuRegister());
3935 locations->SetOut(Location::SameAsFirstInput());
3936 break;
3937 default:
3938 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
3939 }
3940}
3941
Aart Bik351df3e2018-03-07 11:54:57 -08003942void InstructionCodeGeneratorX86::GenerateMinMaxInt(LocationSummary* locations,
3943 bool is_min,
3944 DataType::Type type) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08003945 Location op1_loc = locations->InAt(0);
3946 Location op2_loc = locations->InAt(1);
3947
3948 // Shortcut for same input locations.
3949 if (op1_loc.Equals(op2_loc)) {
3950 // Can return immediately, as op1_loc == out_loc.
3951 // Note: if we ever support separate registers, e.g., output into memory, we need to check for
3952 // a copy here.
3953 DCHECK(locations->Out().Equals(op1_loc));
3954 return;
3955 }
3956
3957 if (type == DataType::Type::kInt64) {
3958 // Need to perform a subtract to get the sign right.
3959 // op1 is already in the same location as the output.
3960 Location output = locations->Out();
3961 Register output_lo = output.AsRegisterPairLow<Register>();
3962 Register output_hi = output.AsRegisterPairHigh<Register>();
3963
3964 Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
3965 Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();
3966
3967 // The comparison is performed by subtracting the second operand from
3968 // the first operand and then setting the status flags in the same
3969    // manner as the SUB instruction.
3970 __ cmpl(output_lo, op2_lo);
3971
3972 // Now use a temp and the borrow to finish the subtraction of op2_hi.
3973 Register temp = locations->GetTemp(0).AsRegister<Register>();
3974 __ movl(temp, output_hi);
3975 __ sbbl(temp, op2_hi);
3976
3977 // Now the condition code is correct.
3978 Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
3979 __ cmovl(cond, output_lo, op2_lo);
3980 __ cmovl(cond, output_hi, op2_hi);
3981 } else {
3982 DCHECK_EQ(type, DataType::Type::kInt32);
3983 Register out = locations->Out().AsRegister<Register>();
3984 Register op2 = op2_loc.AsRegister<Register>();
3985
3986 // (out := op1)
3987 // out <=? op2
3988 // if out is min jmp done
3989 // out := op2
3990 // done:
3991
3992 __ cmpl(out, op2);
3993 Condition cond = is_min ? Condition::kGreater : Condition::kLess;
3994 __ cmovl(cond, out, op2);
3995 }
3996}
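// Note on the kInt64 path above: cmpl on the low halves followed by sbbl on a scratch copy
// of the high half performs a full 64-bit subtract purely for its flags, so a single signed
// condition then drives one cmovl per register half. Semantically the whole sequence is just
// 'is_min ? std::min(a, b) : std::max(a, b)' on int64_t values, computed without branches.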
3997
3998void InstructionCodeGeneratorX86::GenerateMinMaxFP(LocationSummary* locations,
3999 bool is_min,
4000 DataType::Type type) {
4001 Location op1_loc = locations->InAt(0);
4002 Location op2_loc = locations->InAt(1);
4003 Location out_loc = locations->Out();
4004 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
4005
4006 // Shortcut for same input locations.
4007 if (op1_loc.Equals(op2_loc)) {
4008 DCHECK(out_loc.Equals(op1_loc));
4009 return;
4010 }
4011
4012 // (out := op1)
4013 // out <=? op2
4014 // if Nan jmp Nan_label
4015 // if out is min jmp done
4016 // if op2 is min jmp op2_label
4017 // handle -0/+0
4018 // jmp done
4019 // Nan_label:
4020 // out := NaN
4021 // op2_label:
4022 // out := op2
4023 // done:
4024 //
4025 // This removes one jmp, but needs to copy one input (op1) to out.
4026 //
4027 // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?
4028
4029 XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();
4030
4031 NearLabel nan, done, op2_label;
4032 if (type == DataType::Type::kFloat64) {
4033 __ ucomisd(out, op2);
4034 } else {
4035 DCHECK_EQ(type, DataType::Type::kFloat32);
4036 __ ucomiss(out, op2);
4037 }
4038
4039 __ j(Condition::kParityEven, &nan);
4040
4041 __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
4042 __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);
4043
4044 // Handle 0.0/-0.0.
4045 if (is_min) {
4046 if (type == DataType::Type::kFloat64) {
4047 __ orpd(out, op2);
4048 } else {
4049 __ orps(out, op2);
4050 }
4051 } else {
4052 if (type == DataType::Type::kFloat64) {
4053 __ andpd(out, op2);
4054 } else {
4055 __ andps(out, op2);
4056 }
4057 }
4058 __ jmp(&done);
4059
4060 // NaN handling.
4061 __ Bind(&nan);
4062 if (type == DataType::Type::kFloat64) {
4063 // TODO: Use a constant from the constant table (requires extra input).
4064 __ LoadLongConstant(out, kDoubleNaN);
4065 } else {
4066 Register constant = locations->GetTemp(0).AsRegister<Register>();
4067 __ movl(constant, Immediate(kFloatNaN));
4068 __ movd(out, constant);
4069 }
4070 __ jmp(&done);
4071
4072 // out := op2;
4073 __ Bind(&op2_label);
4074 if (type == DataType::Type::kFloat64) {
4075 __ movsd(out, op2);
4076 } else {
4077 __ movss(out, op2);
4078 }
4079
4080 // Done.
4081 __ Bind(&done);
4082}
4083
Aart Bik351df3e2018-03-07 11:54:57 -08004084void InstructionCodeGeneratorX86::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4085 DataType::Type type = minmax->GetResultType();
4086 switch (type) {
4087 case DataType::Type::kInt32:
4088 case DataType::Type::kInt64:
4089 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4090 break;
4091 case DataType::Type::kFloat32:
4092 case DataType::Type::kFloat64:
4093 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4094 break;
4095 default:
4096 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4097 }
4098}
4099
Aart Bik1f8d51b2018-02-15 10:42:37 -08004100void LocationsBuilderX86::VisitMin(HMin* min) {
4101 CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
4102}
4103
4104void InstructionCodeGeneratorX86::VisitMin(HMin* min) {
Aart Bik351df3e2018-03-07 11:54:57 -08004105 GenerateMinMax(min, /*is_min*/ true);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004106}
4107
4108void LocationsBuilderX86::VisitMax(HMax* max) {
4109 CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
4110}
4111
4112void InstructionCodeGeneratorX86::VisitMax(HMax* max) {
Aart Bik351df3e2018-03-07 11:54:57 -08004113 GenerateMinMax(max, /*is_min*/ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004114}
4115
Aart Bik3dad3412018-02-28 12:01:46 -08004116void LocationsBuilderX86::VisitAbs(HAbs* abs) {
4117 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4118 switch (abs->GetResultType()) {
4119 case DataType::Type::kInt32:
4120 locations->SetInAt(0, Location::RegisterLocation(EAX));
4121 locations->SetOut(Location::SameAsFirstInput());
4122 locations->AddTemp(Location::RegisterLocation(EDX));
4123 break;
4124 case DataType::Type::kInt64:
4125 locations->SetInAt(0, Location::RequiresRegister());
4126 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
4127 locations->AddTemp(Location::RequiresRegister());
4128 break;
4129 case DataType::Type::kFloat32:
4130 locations->SetInAt(0, Location::RequiresFpuRegister());
4131 locations->SetOut(Location::SameAsFirstInput());
4132 locations->AddTemp(Location::RequiresFpuRegister());
4133 locations->AddTemp(Location::RequiresRegister());
4134 break;
4135 case DataType::Type::kFloat64:
4136 locations->SetInAt(0, Location::RequiresFpuRegister());
4137 locations->SetOut(Location::SameAsFirstInput());
4138 locations->AddTemp(Location::RequiresFpuRegister());
4139 break;
4140 default:
4141 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4142 }
4143}
4144
4145void InstructionCodeGeneratorX86::VisitAbs(HAbs* abs) {
4146 LocationSummary* locations = abs->GetLocations();
4147 switch (abs->GetResultType()) {
4148 case DataType::Type::kInt32: {
4149 Register out = locations->Out().AsRegister<Register>();
4150 DCHECK_EQ(out, EAX);
4151 Register temp = locations->GetTemp(0).AsRegister<Register>();
4152 DCHECK_EQ(temp, EDX);
4153 // Sign extend EAX into EDX.
4154 __ cdq();
4155 // XOR EAX with sign.
4156 __ xorl(EAX, EDX);
4157 // Subtract out sign to correct.
4158 __ subl(EAX, EDX);
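      // Illustrative values (not from the code): for EAX = -5, cdq makes EDX = -1;
      // -5 ^ -1 = 4 and 4 - (-1) = 5. For a non-negative input EDX is 0 and the
      // xor/sub pair leaves EAX unchanged. Note that abs(INT32_MIN) still wraps
      // to INT32_MIN, matching Java semantics.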
4159 // The result is in EAX.
4160 break;
4161 }
4162 case DataType::Type::kInt64: {
4163 Location input = locations->InAt(0);
4164 Register input_lo = input.AsRegisterPairLow<Register>();
4165 Register input_hi = input.AsRegisterPairHigh<Register>();
4166 Location output = locations->Out();
4167 Register output_lo = output.AsRegisterPairLow<Register>();
4168 Register output_hi = output.AsRegisterPairHigh<Register>();
4169 Register temp = locations->GetTemp(0).AsRegister<Register>();
4170 // Compute the sign into the temporary.
4171 __ movl(temp, input_hi);
4172 __ sarl(temp, Immediate(31));
4173 // Store the sign into the output.
4174 __ movl(output_lo, temp);
4175 __ movl(output_hi, temp);
4176 // XOR the input to the output.
4177 __ xorl(output_lo, input_lo);
4178 __ xorl(output_hi, input_hi);
4179 // Subtract the sign.
4180 __ subl(output_lo, temp);
4181 __ sbbl(output_hi, temp);
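      // Same xor/sub trick as the 32-bit case, applied to the register pair:
      // subl/sbbl propagate the borrow from the low word into the high word
      // when the sign is subtracted out.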
4182 break;
4183 }
4184 case DataType::Type::kFloat32: {
4185 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
4186 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4187 Register constant = locations->GetTemp(1).AsRegister<Register>();
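      // Float abs just clears the IEEE-754 sign bit (AND with 0x7FFFFFFF), which
      // also handles -0.0, infinities and NaN without branching. The movd zeroes
      // the upper XMM lanes of the mask, which is harmless for a scalar value.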
4188 __ movl(constant, Immediate(INT32_C(0x7FFFFFFF)));
4189 __ movd(temp, constant);
4190 __ andps(out, temp);
4191 break;
4192 }
4193 case DataType::Type::kFloat64: {
4194 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
4195 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4196 // TODO: Use a constant from the constant table (requires extra input).
4197 __ LoadLongConstant(temp, INT64_C(0x7FFFFFFFFFFFFFFF));
4198 __ andpd(out, temp);
4199 break;
4200 }
4201 default:
4202 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4203 }
4204}
4205
Calin Juravled0d48522014-11-04 16:40:20 +00004206void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004207 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004208 switch (instruction->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004209 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004210 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004211 case DataType::Type::kInt8:
4212 case DataType::Type::kUint16:
4213 case DataType::Type::kInt16:
4214 case DataType::Type::kInt32: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004215 locations->SetInAt(0, Location::Any());
4216 break;
4217 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004218 case DataType::Type::kInt64: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004219 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
4220 if (!instruction->IsConstant()) {
4221 locations->AddTemp(Location::RequiresRegister());
4222 }
4223 break;
4224 }
4225 default:
4226 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
4227 }
Calin Juravled0d48522014-11-04 16:40:20 +00004228}
4229
4230void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004231 SlowPathCode* slow_path =
4232 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00004233 codegen_->AddSlowPath(slow_path);
4234
4235 LocationSummary* locations = instruction->GetLocations();
4236 Location value = locations->InAt(0);
4237
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004238 switch (instruction->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004239 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004240 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004241 case DataType::Type::kInt8:
4242 case DataType::Type::kUint16:
4243 case DataType::Type::kInt16:
4244 case DataType::Type::kInt32: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004245 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004246 __ testl(value.AsRegister<Register>(), value.AsRegister<Register>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004247 __ j(kEqual, slow_path->GetEntryLabel());
4248 } else if (value.IsStackSlot()) {
4249 __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
4250 __ j(kEqual, slow_path->GetEntryLabel());
4251 } else {
4252 DCHECK(value.IsConstant()) << value;
4253 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004254 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004255 }
4256 }
4257 break;
Calin Juravled0d48522014-11-04 16:40:20 +00004258 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004259 case DataType::Type::kInt64: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004260 if (value.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004261 Register temp = locations->GetTemp(0).AsRegister<Register>();
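        // A register pair is zero iff (low | high) == 0, so OR the halves into
        // the temp and branch on the resulting ZF.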
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004262 __ movl(temp, value.AsRegisterPairLow<Register>());
4263 __ orl(temp, value.AsRegisterPairHigh<Register>());
4264 __ j(kEqual, slow_path->GetEntryLabel());
4265 } else {
4266 DCHECK(value.IsConstant()) << value;
4267 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
4268 __ jmp(slow_path->GetEntryLabel());
4269 }
4270 }
4271 break;
4272 }
4273 default:
 4274      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00004275 }
Calin Juravled0d48522014-11-04 16:40:20 +00004276}
4277
Calin Juravle9aec02f2014-11-18 23:06:35 +00004278void LocationsBuilderX86::HandleShift(HBinaryOperation* op) {
4279 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4280
4281 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004282 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004283
4284 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004285 case DataType::Type::kInt32:
4286 case DataType::Type::kInt64: {
Mark P Mendell73945692015-04-29 14:56:17 +00004287      // Can't use Location::Any() for the first input when the output is SameAsFirstInput().
Calin Juravle9aec02f2014-11-18 23:06:35 +00004288 locations->SetInAt(0, Location::RequiresRegister());
Mark P Mendell73945692015-04-29 14:56:17 +00004289 // The shift count needs to be in CL or a constant.
4290 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, op->InputAt(1)));
Calin Juravle9aec02f2014-11-18 23:06:35 +00004291 locations->SetOut(Location::SameAsFirstInput());
4292 break;
4293 }
4294 default:
4295 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
4296 }
4297}
4298
4299void InstructionCodeGeneratorX86::HandleShift(HBinaryOperation* op) {
4300 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4301
4302 LocationSummary* locations = op->GetLocations();
4303 Location first = locations->InAt(0);
4304 Location second = locations->InAt(1);
4305 DCHECK(first.Equals(locations->Out()));
4306
4307 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004308 case DataType::Type::kInt32: {
Mark P Mendell73945692015-04-29 14:56:17 +00004309 DCHECK(first.IsRegister());
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004310 Register first_reg = first.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004311 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004312 Register second_reg = second.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004313 DCHECK_EQ(ECX, second_reg);
4314 if (op->IsShl()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004315 __ shll(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004316 } else if (op->IsShr()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004317 __ sarl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004318 } else {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004319 __ shrl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004320 }
4321 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004322 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance;
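        // Java masks shift distances to the type width (count & 31 for int,
        // count & 63 for long); kMaxIntShiftDistance/kMaxLongShiftDistance
        // encode those masks.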
Mark P Mendell73945692015-04-29 14:56:17 +00004323 if (shift == 0) {
4324 return;
4325 }
4326 Immediate imm(shift);
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004327 if (op->IsShl()) {
4328 __ shll(first_reg, imm);
4329 } else if (op->IsShr()) {
4330 __ sarl(first_reg, imm);
4331 } else {
4332 __ shrl(first_reg, imm);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004333 }
4334 }
4335 break;
4336 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004337 case DataType::Type::kInt64: {
Mark P Mendell73945692015-04-29 14:56:17 +00004338 if (second.IsRegister()) {
4339 Register second_reg = second.AsRegister<Register>();
4340 DCHECK_EQ(ECX, second_reg);
4341 if (op->IsShl()) {
4342 GenerateShlLong(first, second_reg);
4343 } else if (op->IsShr()) {
4344 GenerateShrLong(first, second_reg);
4345 } else {
4346 GenerateUShrLong(first, second_reg);
4347 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004348 } else {
Mark P Mendell73945692015-04-29 14:56:17 +00004349 // Shift by a constant.
Roland Levillain5b5b9312016-03-22 14:57:31 +00004350 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Mark P Mendell73945692015-04-29 14:56:17 +00004351 // Nothing to do if the shift is 0, as the input is already the output.
4352 if (shift != 0) {
4353 if (op->IsShl()) {
4354 GenerateShlLong(first, shift);
4355 } else if (op->IsShr()) {
4356 GenerateShrLong(first, shift);
4357 } else {
4358 GenerateUShrLong(first, shift);
4359 }
4360 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004361 }
4362 break;
4363 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00004364 default:
4365 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
4366 }
4367}
4368
Mark P Mendell73945692015-04-29 14:56:17 +00004369void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, int shift) {
4370 Register low = loc.AsRegisterPairLow<Register>();
4371 Register high = loc.AsRegisterPairHigh<Register>();
Mark Mendellba56d062015-05-05 21:34:03 -04004372 if (shift == 1) {
4373 // This is just an addition.
4374 __ addl(low, low);
4375 __ adcl(high, high);
4376 } else if (shift == 32) {
Mark P Mendell73945692015-04-29 14:56:17 +00004377 // Shift by 32 is easy. High gets low, and low gets 0.
4378 codegen_->EmitParallelMoves(
4379 loc.ToLow(),
4380 loc.ToHigh(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004381 DataType::Type::kInt32,
Mark P Mendell73945692015-04-29 14:56:17 +00004382 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
4383 loc.ToLow(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004384 DataType::Type::kInt32);
Mark P Mendell73945692015-04-29 14:56:17 +00004385 } else if (shift > 32) {
4386 // Low part becomes 0. High part is low part << (shift-32).
4387 __ movl(high, low);
4388 __ shll(high, Immediate(shift - 32));
4389 __ xorl(low, low);
4390 } else {
4391 // Between 1 and 31.
4392 __ shld(high, low, Immediate(shift));
4393 __ shll(low, Immediate(shift));
4394 }
4395}
4396
Calin Juravle9aec02f2014-11-18 23:06:35 +00004397void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, Register shifter) {
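  // Note: shld/shll with a CL count only use the count modulo 32, so the code
  // below first shifts both halves by (count & 31) and then, when bit 5 of the
  // count is set (count in [32, 63]), moves the shifted low word into the high
  // word and zeroes the low word to complete the 64-bit shift.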
Mark Mendell0c9497d2015-08-21 09:30:05 -04004398 NearLabel done;
Calin Juravle9aec02f2014-11-18 23:06:35 +00004399 __ shld(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>(), shifter);
4400 __ shll(loc.AsRegisterPairLow<Register>(), shifter);
4401 __ testl(shifter, Immediate(32));
4402 __ j(kEqual, &done);
4403 __ movl(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>());
4404 __ movl(loc.AsRegisterPairLow<Register>(), Immediate(0));
4405 __ Bind(&done);
4406}
4407
Mark P Mendell73945692015-04-29 14:56:17 +00004408void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, int shift) {
4409 Register low = loc.AsRegisterPairLow<Register>();
4410 Register high = loc.AsRegisterPairHigh<Register>();
4411 if (shift == 32) {
4412 // Need to copy the sign.
4413 DCHECK_NE(low, high);
4414 __ movl(low, high);
4415 __ sarl(high, Immediate(31));
4416 } else if (shift > 32) {
4417 DCHECK_NE(low, high);
4418 // High part becomes sign. Low part is shifted by shift - 32.
4419 __ movl(low, high);
4420 __ sarl(high, Immediate(31));
4421 __ sarl(low, Immediate(shift - 32));
4422 } else {
4423 // Between 1 and 31.
4424 __ shrd(low, high, Immediate(shift));
4425 __ sarl(high, Immediate(shift));
4426 }
4427}
4428
Calin Juravle9aec02f2014-11-18 23:06:35 +00004429void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004430 NearLabel done;
Calin Juravle9aec02f2014-11-18 23:06:35 +00004431 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
4432 __ sarl(loc.AsRegisterPairHigh<Register>(), shifter);
4433 __ testl(shifter, Immediate(32));
4434 __ j(kEqual, &done);
4435 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
4436 __ sarl(loc.AsRegisterPairHigh<Register>(), Immediate(31));
4437 __ Bind(&done);
4438}
4439
Mark P Mendell73945692015-04-29 14:56:17 +00004440void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, int shift) {
4441 Register low = loc.AsRegisterPairLow<Register>();
4442 Register high = loc.AsRegisterPairHigh<Register>();
4443 if (shift == 32) {
4444 // Shift by 32 is easy. Low gets high, and high gets 0.
4445 codegen_->EmitParallelMoves(
4446 loc.ToHigh(),
4447 loc.ToLow(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004448 DataType::Type::kInt32,
Mark P Mendell73945692015-04-29 14:56:17 +00004449 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
4450 loc.ToHigh(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004451 DataType::Type::kInt32);
Mark P Mendell73945692015-04-29 14:56:17 +00004452 } else if (shift > 32) {
4453 // Low part is high >> (shift - 32). High part becomes 0.
4454 __ movl(low, high);
4455 __ shrl(low, Immediate(shift - 32));
4456 __ xorl(high, high);
4457 } else {
4458 // Between 1 and 31.
4459 __ shrd(low, high, Immediate(shift));
4460 __ shrl(high, Immediate(shift));
4461 }
4462}
4463
Calin Juravle9aec02f2014-11-18 23:06:35 +00004464void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004465 NearLabel done;
Calin Juravle9aec02f2014-11-18 23:06:35 +00004466 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
4467 __ shrl(loc.AsRegisterPairHigh<Register>(), shifter);
4468 __ testl(shifter, Immediate(32));
4469 __ j(kEqual, &done);
4470 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
4471 __ movl(loc.AsRegisterPairHigh<Register>(), Immediate(0));
4472 __ Bind(&done);
4473}
4474
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004475void LocationsBuilderX86::VisitRor(HRor* ror) {
4476 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004477 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004478
4479 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004480 case DataType::Type::kInt64:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004481 // Add the temporary needed.
4482 locations->AddTemp(Location::RequiresRegister());
4483 FALLTHROUGH_INTENDED;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004484 case DataType::Type::kInt32:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004485 locations->SetInAt(0, Location::RequiresRegister());
4486 // The shift count needs to be in CL (unless it is a constant).
4487 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, ror->InputAt(1)));
4488 locations->SetOut(Location::SameAsFirstInput());
4489 break;
4490 default:
4491 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4492 UNREACHABLE();
4493 }
4494}
4495
4496void InstructionCodeGeneratorX86::VisitRor(HRor* ror) {
4497 LocationSummary* locations = ror->GetLocations();
4498 Location first = locations->InAt(0);
4499 Location second = locations->InAt(1);
4500
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004501 if (ror->GetResultType() == DataType::Type::kInt32) {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004502 Register first_reg = first.AsRegister<Register>();
4503 if (second.IsRegister()) {
4504 Register second_reg = second.AsRegister<Register>();
4505 __ rorl(first_reg, second_reg);
4506 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004507 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004508 __ rorl(first_reg, imm);
4509 }
4510 return;
4511 }
4512
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004513 DCHECK_EQ(ror->GetResultType(), DataType::Type::kInt64);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004514 Register first_reg_lo = first.AsRegisterPairLow<Register>();
4515 Register first_reg_hi = first.AsRegisterPairHigh<Register>();
4516 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
4517 if (second.IsRegister()) {
4518 Register second_reg = second.AsRegister<Register>();
4519 DCHECK_EQ(second_reg, ECX);
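    // Rotate both halves right by (count & 31): the first shrd feeds the high
    // word from the low word, the second feeds the low word from a saved copy
    // of the original high word. Since shrd ignores bit 5 of the count, the
    // halves are then conditionally swapped when that bit is set.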
4520 __ movl(temp_reg, first_reg_hi);
4521 __ shrd(first_reg_hi, first_reg_lo, second_reg);
4522 __ shrd(first_reg_lo, temp_reg, second_reg);
4523 __ movl(temp_reg, first_reg_hi);
4524 __ testl(second_reg, Immediate(32));
4525 __ cmovl(kNotEqual, first_reg_hi, first_reg_lo);
4526 __ cmovl(kNotEqual, first_reg_lo, temp_reg);
4527 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004528 int32_t shift_amt = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004529 if (shift_amt == 0) {
4530 // Already fine.
4531 return;
4532 }
4533 if (shift_amt == 32) {
4534 // Just swap.
4535 __ movl(temp_reg, first_reg_lo);
4536 __ movl(first_reg_lo, first_reg_hi);
4537 __ movl(first_reg_hi, temp_reg);
4538 return;
4539 }
4540
4541 Immediate imm(shift_amt);
 4542    // Save the contents of the low value.
4543 __ movl(temp_reg, first_reg_lo);
4544
4545 // Shift right into low, feeding bits from high.
4546 __ shrd(first_reg_lo, first_reg_hi, imm);
4547
4548 // Shift right into high, feeding bits from the original low.
4549 __ shrd(first_reg_hi, temp_reg, imm);
4550
4551 // Swap if needed.
4552 if (shift_amt > 32) {
4553 __ movl(temp_reg, first_reg_lo);
4554 __ movl(first_reg_lo, first_reg_hi);
4555 __ movl(first_reg_hi, temp_reg);
4556 }
4557 }
4558}
4559
Calin Juravle9aec02f2014-11-18 23:06:35 +00004560void LocationsBuilderX86::VisitShl(HShl* shl) {
4561 HandleShift(shl);
4562}
4563
4564void InstructionCodeGeneratorX86::VisitShl(HShl* shl) {
4565 HandleShift(shl);
4566}
4567
4568void LocationsBuilderX86::VisitShr(HShr* shr) {
4569 HandleShift(shr);
4570}
4571
4572void InstructionCodeGeneratorX86::VisitShr(HShr* shr) {
4573 HandleShift(shr);
4574}
4575
4576void LocationsBuilderX86::VisitUShr(HUShr* ushr) {
4577 HandleShift(ushr);
4578}
4579
4580void InstructionCodeGeneratorX86::VisitUShr(HUShr* ushr) {
4581 HandleShift(ushr);
4582}
4583
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004584void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004585 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4586 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004587 locations->SetOut(Location::RegisterLocation(EAX));
Alex Lightd109e302018-06-27 10:25:41 -07004588 InvokeRuntimeCallingConvention calling_convention;
4589 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004590}
4591
4592void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07004593 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
4594 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
4595 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004596}
4597
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004598void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004599 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4600 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004601 locations->SetOut(Location::RegisterLocation(EAX));
4602 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004603 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4604 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004605}
4606
4607void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
Vladimir Markob5461632018-10-15 14:24:21 +01004608 // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
4609 QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
Nicolas Geoffrayd0958442017-01-30 14:57:16 +00004610 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004611 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004612 DCHECK(!codegen_->IsLeafMethod());
4613}
4614
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004615void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004616 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004617 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004618 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4619 if (location.IsStackSlot()) {
4620 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4621 } else if (location.IsDoubleStackSlot()) {
4622 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004623 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004624 locations->SetOut(location);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004625}
4626
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004627void InstructionCodeGeneratorX86::VisitParameterValue(
4628 HParameterValue* instruction ATTRIBUTE_UNUSED) {
4629}
4630
4631void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) {
4632 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004633 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004634 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4635}
4636
4637void InstructionCodeGeneratorX86::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004638}
4639
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004640void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) {
4641 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004642 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004643 locations->SetInAt(0, Location::RequiresRegister());
4644 locations->SetOut(Location::RequiresRegister());
4645}
4646
4647void InstructionCodeGeneratorX86::VisitClassTableGet(HClassTableGet* instruction) {
4648 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00004649 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004650 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004651 instruction->GetIndex(), kX86PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004652 __ movl(locations->Out().AsRegister<Register>(),
4653 Address(locations->InAt(0).AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004654 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004655 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004656 instruction->GetIndex(), kX86PointerSize));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004657 __ movl(locations->Out().AsRegister<Register>(),
4658 Address(locations->InAt(0).AsRegister<Register>(),
4659 mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
 4660    // out = out->GetImtEntryAt(method_offset);
4661 __ movl(locations->Out().AsRegister<Register>(),
4662 Address(locations->Out().AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004663 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004664}
4665
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004666void LocationsBuilderX86::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004667 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004668 new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004669 locations->SetInAt(0, Location::RequiresRegister());
4670 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004671}
4672
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004673void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
4674 LocationSummary* locations = not_->GetLocations();
Roland Levillain70566432014-10-24 16:20:17 +01004675 Location in = locations->InAt(0);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004676 Location out = locations->Out();
Roland Levillain70566432014-10-24 16:20:17 +01004677 DCHECK(in.Equals(out));
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004678 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004679 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004680 __ notl(out.AsRegister<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004681 break;
4682
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004683 case DataType::Type::kInt64:
Roland Levillain70566432014-10-24 16:20:17 +01004684 __ notl(out.AsRegisterPairLow<Register>());
4685 __ notl(out.AsRegisterPairHigh<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004686 break;
4687
4688 default:
4689 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4690 }
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004691}
4692
David Brazdil66d126e2015-04-03 16:02:44 +01004693void LocationsBuilderX86::VisitBooleanNot(HBooleanNot* bool_not) {
4694 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004695 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
David Brazdil66d126e2015-04-03 16:02:44 +01004696 locations->SetInAt(0, Location::RequiresRegister());
4697 locations->SetOut(Location::SameAsFirstInput());
4698}
4699
4700void InstructionCodeGeneratorX86::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004701 LocationSummary* locations = bool_not->GetLocations();
4702 Location in = locations->InAt(0);
4703 Location out = locations->Out();
4704 DCHECK(in.Equals(out));
4705 __ xorl(out.AsRegister<Register>(), Immediate(1));
4706}
4707
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004708void LocationsBuilderX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004709 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004710 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00004711 switch (compare->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004712 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004713 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004714 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004715 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004716 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004717 case DataType::Type::kInt32:
4718 case DataType::Type::kInt64: {
Calin Juravleddb7df22014-11-25 20:56:51 +00004719 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravleddb7df22014-11-25 20:56:51 +00004720 locations->SetInAt(1, Location::Any());
4721 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4722 break;
4723 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004724 case DataType::Type::kFloat32:
4725 case DataType::Type::kFloat64: {
Calin Juravleddb7df22014-11-25 20:56:51 +00004726 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004727 if (compare->InputAt(1)->IsX86LoadFromConstantTable()) {
4728 DCHECK(compare->InputAt(1)->IsEmittedAtUseSite());
4729 } else if (compare->InputAt(1)->IsConstant()) {
4730 locations->SetInAt(1, Location::RequiresFpuRegister());
4731 } else {
4732 locations->SetInAt(1, Location::Any());
4733 }
Calin Juravleddb7df22014-11-25 20:56:51 +00004734 locations->SetOut(Location::RequiresRegister());
4735 break;
4736 }
4737 default:
4738 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
4739 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004740}
4741
4742void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004743 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004744 Register out = locations->Out().AsRegister<Register>();
Calin Juravleddb7df22014-11-25 20:56:51 +00004745 Location left = locations->InAt(0);
4746 Location right = locations->InAt(1);
4747
Mark Mendell0c9497d2015-08-21 09:30:05 -04004748 NearLabel less, greater, done;
Aart Bika19616e2016-02-01 18:57:58 -08004749 Condition less_cond = kLess;
4750
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004751 switch (compare->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004752 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004753 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004754 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004755 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004756 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004757 case DataType::Type::kInt32: {
Roland Levillain0b671c02016-08-19 12:02:34 +01004758 codegen_->GenerateIntCompare(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08004759 break;
4760 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004761 case DataType::Type::kInt64: {
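      // A 64-bit compare is split in two: the high words are compared signed and
      // decide the result unless they are equal, in which case the low words are
      // compared unsigned (hence less_cond is switched to kBelow below).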
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004762 Register left_low = left.AsRegisterPairLow<Register>();
4763 Register left_high = left.AsRegisterPairHigh<Register>();
4764 int32_t val_low = 0;
4765 int32_t val_high = 0;
4766 bool right_is_const = false;
4767
4768 if (right.IsConstant()) {
4769 DCHECK(right.GetConstant()->IsLongConstant());
4770 right_is_const = true;
4771 int64_t val = right.GetConstant()->AsLongConstant()->GetValue();
4772 val_low = Low32Bits(val);
4773 val_high = High32Bits(val);
4774 }
4775
Calin Juravleddb7df22014-11-25 20:56:51 +00004776 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004777 __ cmpl(left_high, right.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004778 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004779 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004780 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004781 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004782 codegen_->Compare32BitValue(left_high, val_high);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004783 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004784 __ j(kLess, &less); // Signed compare.
4785 __ j(kGreater, &greater); // Signed compare.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004786 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004787 __ cmpl(left_low, right.AsRegisterPairLow<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004788 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004789 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004790 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004791 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004792 codegen_->Compare32BitValue(left_low, val_low);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004793 }
Aart Bika19616e2016-02-01 18:57:58 -08004794 less_cond = kBelow; // for CF (unsigned).
Calin Juravleddb7df22014-11-25 20:56:51 +00004795 break;
4796 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004797 case DataType::Type::kFloat32: {
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004798 GenerateFPCompare(left, right, compare, false);
Calin Juravleddb7df22014-11-25 20:56:51 +00004799 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
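      // An unordered result (a NaN operand) is mapped according to the bias:
      // gt-bias compares (cmpg-*) treat NaN as greater (+1), lt-bias compares
      // (cmpl-*) treat it as less (-1).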
Aart Bika19616e2016-02-01 18:57:58 -08004800 less_cond = kBelow; // for CF (floats).
Calin Juravleddb7df22014-11-25 20:56:51 +00004801 break;
4802 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004803 case DataType::Type::kFloat64: {
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004804 GenerateFPCompare(left, right, compare, true);
Calin Juravleddb7df22014-11-25 20:56:51 +00004805 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08004806 less_cond = kBelow; // for CF (floats).
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004807 break;
4808 }
4809 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00004810 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004811 }
Aart Bika19616e2016-02-01 18:57:58 -08004812
Calin Juravleddb7df22014-11-25 20:56:51 +00004813 __ movl(out, Immediate(0));
4814 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08004815 __ j(less_cond, &less);
Calin Juravleddb7df22014-11-25 20:56:51 +00004816
4817 __ Bind(&greater);
4818 __ movl(out, Immediate(1));
4819 __ jmp(&done);
4820
4821 __ Bind(&less);
4822 __ movl(out, Immediate(-1));
4823
4824 __ Bind(&done);
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004825}
4826
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004827void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004828 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004829 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004830 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01004831 locations->SetInAt(i, Location::Any());
4832 }
4833 locations->SetOut(Location::Any());
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004834}
4835
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004836void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01004837 LOG(FATAL) << "Unreachable";
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004838}
4839
Roland Levillain7c1559a2015-12-15 10:55:36 +00004840void CodeGeneratorX86::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004841 /*
4842 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
4843 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86 memory model.
4844 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4845 */
4846 switch (kind) {
4847 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004848 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004849 break;
4850 }
4851 case MemBarrierKind::kAnyStore:
4852 case MemBarrierKind::kLoadAny:
4853 case MemBarrierKind::kStoreStore: {
4854 // nop
4855 break;
4856 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004857 case MemBarrierKind::kNTStoreStore:
4858 // Non-Temporal Store/Store needs an explicit fence.
Andreas Gampe3db70682018-12-26 15:12:03 -08004859 MemoryFence(/* non-temporal= */ true);
Mark Mendell7aa04a12016-01-27 22:39:07 -05004860 break;
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004861 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004862}
4863
Vladimir Markodc151b22015-10-15 18:02:30 +01004864HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch(
4865 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffraybdb2ecc2018-09-18 14:33:55 +01004866 ArtMethod* method ATTRIBUTE_UNUSED) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004867 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01004868}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004869
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004870Register CodeGeneratorX86::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
4871 Register temp) {
4872 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
Vladimir Markoc53c0792015-11-19 15:48:33 +00004873 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004874 if (!invoke->GetLocations()->Intrinsified()) {
4875 return location.AsRegister<Register>();
4876 }
4877 // For intrinsics we allow any location, so it may be on the stack.
4878 if (!location.IsRegister()) {
4879 __ movl(temp, Address(ESP, location.GetStackIndex()));
4880 return temp;
4881 }
4882 // For register locations, check if the register was saved. If so, get it from the stack.
4883 // Note: There is a chance that the register was saved but not overwritten, so we could
4884 // save one load. However, since this is just an intrinsic slow path we prefer this
 4885  // simple and more robust approach rather than trying to determine if that's the case.
4886 SlowPathCode* slow_path = GetCurrentSlowPath();
Vladimir Marko4ee8e292017-06-02 15:39:30 +00004887 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
4888 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
4889 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
4890 __ movl(temp, Address(ESP, stack_offset));
4891 return temp;
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004892 }
4893 return location.AsRegister<Register>();
4894}
4895
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004896void CodeGeneratorX86::GenerateStaticOrDirectCall(
4897 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Vladimir Marko58155012015-08-19 12:49:41 +00004898 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4899 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004900 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00004901 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004902 uint32_t offset =
4903 GetThreadOffset<kX86PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
4904 __ fs()->movl(temp.AsRegister<Register>(), Address::Absolute(offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004905 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004906 }
Vladimir Marko58155012015-08-19 12:49:41 +00004907 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004908 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004909 break;
Vladimir Marko65979462017-05-19 17:25:12 +01004910 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko44ca0752019-07-29 10:18:25 +01004911 DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
Vladimir Marko65979462017-05-19 17:25:12 +01004912 Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
4913 temp.AsRegister<Register>());
4914 __ leal(temp.AsRegister<Register>(), Address(base_reg, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004915 RecordBootImageMethodPatch(invoke);
Vladimir Marko65979462017-05-19 17:25:12 +01004916 break;
4917 }
Vladimir Markob066d432018-01-03 13:14:37 +00004918 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
4919 Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
4920 temp.AsRegister<Register>());
4921 __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
4922 RecordBootImageRelRoPatch(
4923 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress(),
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004924 GetBootImageOffset(invoke));
Vladimir Markob066d432018-01-03 13:14:37 +00004925 break;
4926 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004927 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004928 Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
4929 temp.AsRegister<Register>());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004930 __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004931 RecordMethodBssEntryPatch(invoke);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01004932 // No need for memory fence, thanks to the x86 memory model.
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004933 break;
4934 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004935 case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
4936 __ movl(temp.AsRegister<Register>(), Immediate(invoke->GetMethodAddress()));
4937 break;
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004938 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
4939 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
4940 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko9b688a02015-05-06 14:12:42 +01004941 }
Vladimir Marko58155012015-08-19 12:49:41 +00004942 }
4943
4944 switch (invoke->GetCodePtrLocation()) {
4945 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
4946 __ call(GetFrameEntryLabel());
4947 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004948 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4949 // (callee_method + offset_of_quick_compiled_code)()
4950 __ call(Address(callee_method.AsRegister<Register>(),
4951 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07004952 kX86PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00004953 break;
Mark Mendell09ed1a32015-03-25 08:30:06 -04004954 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004955 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Mark Mendell09ed1a32015-03-25 08:30:06 -04004956
4957 DCHECK(!IsLeafMethod());
Mark Mendell09ed1a32015-03-25 08:30:06 -04004958}
4959
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004960void CodeGeneratorX86::GenerateVirtualCall(
4961 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004962 Register temp = temp_in.AsRegister<Register>();
4963 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4964 invoke->GetVTableIndex(), kX86PointerSize).Uint32Value();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004965
4966 // Use the calling convention instead of the location of the receiver, as
4967 // intrinsics may have put the receiver in a different register. In the intrinsics
4968 // slow path, the arguments have been moved to the right place, so here we are
4969 // guaranteed that the receiver is the first register of the calling convention.
4970 InvokeDexCallingConvention calling_convention;
4971 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004972 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004973 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004974 __ movl(temp, Address(receiver, class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004975 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004976 // Instead of simply (possibly) unpoisoning `temp` here, we should
4977 // emit a read barrier for the previous class reference load.
4978 // However this is not required in practice, as this is an
4979 // intermediate/temporary reference and because the current
4980 // concurrent copying collector keeps the from-space memory
4981 // intact/accessible until the end of the marking phase (the
 4982  // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004983 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +00004984
4985 MaybeGenerateInlineCacheCheck(invoke, temp);
4986
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004987 // temp = temp->GetMethodAt(method_offset);
4988 __ movl(temp, Address(temp, method_offset));
4989 // call temp->GetEntryPoint();
4990 __ call(Address(
Andreas Gampe542451c2016-07-26 09:02:02 -07004991 temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004992 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004993}
4994
Vladimir Marko6fd16062018-06-26 11:02:04 +01004995void CodeGeneratorX86::RecordBootImageIntrinsicPatch(HX86ComputeBaseMethodAddress* method_address,
4996 uint32_t intrinsic_data) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01004997 boot_image_other_patches_.emplace_back(
Andreas Gampe3db70682018-12-26 15:12:03 -08004998 method_address, /* target_dex_file= */ nullptr, intrinsic_data);
Vladimir Marko2d06e022019-07-08 15:45:19 +01004999 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Marko6fd16062018-06-26 11:02:04 +01005000}
5001
Vladimir Markob066d432018-01-03 13:14:37 +00005002void CodeGeneratorX86::RecordBootImageRelRoPatch(HX86ComputeBaseMethodAddress* method_address,
5003 uint32_t boot_image_offset) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01005004 boot_image_other_patches_.emplace_back(
Andreas Gampe3db70682018-12-26 15:12:03 -08005005 method_address, /* target_dex_file= */ nullptr, boot_image_offset);
Vladimir Marko2d06e022019-07-08 15:45:19 +01005006 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Markob066d432018-01-03 13:14:37 +00005007}
5008
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005009void CodeGeneratorX86::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
Vladimir Marko65979462017-05-19 17:25:12 +01005010 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005011 HX86ComputeBaseMethodAddress* method_address =
Vladimir Marko65979462017-05-19 17:25:12 +01005012 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005013 boot_image_method_patches_.emplace_back(
5014 method_address, invoke->GetTargetMethod().dex_file, invoke->GetTargetMethod().index);
Vladimir Marko65979462017-05-19 17:25:12 +01005015 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005016}
5017
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005018void CodeGeneratorX86::RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke) {
5019 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5020 HX86ComputeBaseMethodAddress* method_address =
5021 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005022 // Add the patch entry and bind its label at the end of the instruction.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005023 method_bss_entry_patches_.emplace_back(
5024 method_address, &GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
5025 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005026}
5027
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005028void CodeGeneratorX86::RecordBootImageTypePatch(HLoadClass* load_class) {
5029 HX86ComputeBaseMethodAddress* method_address =
5030 load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
5031 boot_image_type_patches_.emplace_back(
5032 method_address, &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00005033 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005034}
5035
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005036Label* CodeGeneratorX86::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005037 HX86ComputeBaseMethodAddress* method_address =
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005038 load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
5039 type_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005040 method_address, &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00005041 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005042}
5043
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005044void CodeGeneratorX86::RecordBootImageStringPatch(HLoadString* load_string) {
5045 HX86ComputeBaseMethodAddress* method_address =
5046 load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
5047 boot_image_string_patches_.emplace_back(
5048 method_address, &load_string->GetDexFile(), load_string->GetStringIndex().index_);
5049 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01005050}
5051
Vladimir Markoaad75c62016-10-03 08:46:48 +00005052Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005053 HX86ComputeBaseMethodAddress* method_address =
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005054 load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005055 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005056 method_address, &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005057 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00005058}
5059
Vladimir Markoeebb8212018-06-05 14:57:24 +01005060void CodeGeneratorX86::LoadBootImageAddress(Register reg,
Vladimir Marko6fd16062018-06-26 11:02:04 +01005061 uint32_t boot_image_reference,
Vladimir Markoeebb8212018-06-05 14:57:24 +01005062 HInvokeStaticOrDirect* invoke) {
Vladimir Marko6fd16062018-06-26 11:02:04 +01005063 if (GetCompilerOptions().IsBootImage()) {
5064 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5065 HX86ComputeBaseMethodAddress* method_address =
5066 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
5067 DCHECK(method_address != nullptr);
5068 Register method_address_reg =
5069 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
5070 __ leal(reg, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
5071 RecordBootImageIntrinsicPatch(method_address, boot_image_reference);
Vladimir Markoa2da9b92018-10-10 14:21:55 +01005072 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01005073 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5074 HX86ComputeBaseMethodAddress* method_address =
5075 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
5076 DCHECK(method_address != nullptr);
5077 Register method_address_reg =
5078 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
5079 __ movl(reg, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko6fd16062018-06-26 11:02:04 +01005080 RecordBootImageRelRoPatch(method_address, boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01005081 } else {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005082 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markoeebb8212018-06-05 14:57:24 +01005083 gc::Heap* heap = Runtime::Current()->GetHeap();
5084 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01005085 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01005086 __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
5087 }
5088}
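// Illustrative summary (a sketch, not emitted verbatim): depending on the compilation
// mode, LoadBootImageAddress() materializes the boot image address in `reg` as one of:
//
//   boot image compile:  leal reg, [method_address_reg + <placeholder>]  // IntrinsicReference patch
//   PIC AOT compile:     movl reg, [method_address_reg + <placeholder>]  // boot image rel.ro patch
//   JIT compile:         movl reg, <absolute address within the boot image>
//
// The <placeholder> displacement (kDummy32BitOffset) is rewritten when the recorded
// linker patch is applied.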
5089
Vladimir Marko6fd16062018-06-26 11:02:04 +01005090void CodeGeneratorX86::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
5091 uint32_t boot_image_offset) {
5092 DCHECK(invoke->IsStatic());
5093 InvokeRuntimeCallingConvention calling_convention;
5094 Register argument = calling_convention.GetRegisterAt(0);
5095 if (GetCompilerOptions().IsBootImage()) {
5096 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
5097 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
5098 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5099 HX86ComputeBaseMethodAddress* method_address =
5100 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
5101 DCHECK(method_address != nullptr);
5102 Register method_address_reg =
5103 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
5104 __ leal(argument, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
5105 MethodReference target_method = invoke->GetTargetMethod();
5106 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
5107 boot_image_type_patches_.emplace_back(method_address, target_method.dex_file, type_idx.index_);
5108 __ Bind(&boot_image_type_patches_.back().label);
5109 } else {
5110 LoadBootImageAddress(argument, boot_image_offset, invoke);
5111 }
5112 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
5113 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
5114}
5115
Vladimir Markoaad75c62016-10-03 08:46:48 +00005116// The label points to the end of the "movl" (or other) instruction, but the literal offset
5117// for the patch needs to point to the embedded constant, which occupies the last 4 bytes.
5118constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
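// Worked example (sketch): a pc-relative "movl reg, [base + disp32]" is encoded as
// opcode + ModRM (+ SIB) followed by a 4-byte displacement, and the patch label is
// bound right after the instruction. The linker patch must point at the displacement
// itself, i.e. the last 4 bytes, hence literal_offset = label.Position() - 4.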
5119
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005120template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00005121inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005122 const ArenaDeque<X86PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005123 ArenaVector<linker::LinkerPatch>* linker_patches) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005124 for (const X86PcRelativePatchInfo& info : infos) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005125 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005126 linker_patches->push_back(Factory(literal_offset,
5127 info.target_dex_file,
5128 GetMethodAddressOffset(info.method_address),
5129 info.offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00005130 }
5131}
5132
Vladimir Marko6fd16062018-06-26 11:02:04 +01005133template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
5134linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
5135 const DexFile* target_dex_file,
5136 uint32_t pc_insn_offset,
5137 uint32_t boot_image_offset) {
5138 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
5139 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00005140}
5141
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005142void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00005143 DCHECK(linker_patches->empty());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00005144 size_t size =
Vladimir Marko65979462017-05-19 17:25:12 +01005145 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005146 method_bss_entry_patches_.size() +
Vladimir Marko1998cd02017-01-13 13:02:58 +00005147 boot_image_type_patches_.size() +
Vladimir Marko65979462017-05-19 17:25:12 +01005148 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005149 boot_image_string_patches_.size() +
Vladimir Marko6fd16062018-06-26 11:02:04 +01005150 string_bss_entry_patches_.size() +
Vladimir Marko2d06e022019-07-08 15:45:19 +01005151 boot_image_other_patches_.size();
Vladimir Marko0f7dca42015-11-02 14:36:43 +00005152 linker_patches->reserve(size);
Vladimir Marko44ca0752019-07-29 10:18:25 +01005153 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005154 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
5155 boot_image_method_patches_, linker_patches);
5156 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
5157 boot_image_type_patches_, linker_patches);
5158 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005159 boot_image_string_patches_, linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005160 } else {
Vladimir Marko2d06e022019-07-08 15:45:19 +01005161 DCHECK(boot_image_method_patches_.empty());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005162 DCHECK(boot_image_type_patches_.empty());
5163 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko2d06e022019-07-08 15:45:19 +01005164 }
5165 if (GetCompilerOptions().IsBootImage()) {
5166 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
5167 boot_image_other_patches_, linker_patches);
5168 } else {
5169 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
5170 boot_image_other_patches_, linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005171 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005172 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
5173 method_bss_entry_patches_, linker_patches);
5174 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
5175 type_bss_entry_patches_, linker_patches);
5176 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
5177 string_bss_entry_patches_, linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00005178 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00005179}
5180
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005181void CodeGeneratorX86::MarkGCCard(Register temp,
5182 Register card,
5183 Register object,
5184 Register value,
5185 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005186 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005187 if (value_can_be_null) {
5188 __ testl(value, value);
5189 __ j(kEqual, &is_null);
5190 }
Roland Levillainc73f0522018-08-14 15:16:50 +01005191 // Load the address of the card table into `card`.
Andreas Gampe542451c2016-07-26 09:02:02 -07005192 __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86PointerSize>().Int32Value()));
Roland Levillainc73f0522018-08-14 15:16:50 +01005193 // Calculate the offset (in the card table) of the card corresponding to
5194 // `object`.
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005195 __ movl(temp, object);
5196 __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillainc73f0522018-08-14 15:16:50 +01005197 // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
5198 // `object`'s card.
5199 //
5200 // Register `card` contains the address of the card table. Note that the card
5201 // table's base is biased during its creation so that it always starts at an
5202 // address whose least-significant byte is equal to `kCardDirty` (see
5203 // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
5204 // below writes the `kCardDirty` (byte) value into the `object`'s card
5205 // (located at `card + object >> kCardShift`).
5206 //
5207 // This dual use of the value in register `card` (1. to calculate the location
5208 // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
5209 // (no need to explicitly load `kCardDirty` as an immediate value).
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00005210 __ movb(Address(temp, card, TIMES_1, 0),
5211 X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005212 if (value_can_be_null) {
5213 __ Bind(&is_null);
5214 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005215}
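// In short (restating the comments above as a formula; the exact constants live in
// art::gc::accounting::CardTable):
//   temp = object >> kCardShift
//   *(card + temp) = low byte of `card`   // == kCardDirty, thanks to the biased base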
5216
Calin Juravle52c48962014-12-16 17:02:57 +00005217void LocationsBuilderX86::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
5218 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005219
5220 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005221 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005222 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005223 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5224 kEmitCompilerReadBarrier
5225 ? LocationSummary::kCallOnSlowPath
5226 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005227 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005228 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005229 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005230 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005231
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005232 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005233 locations->SetOut(Location::RequiresFpuRegister());
5234 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005235 // The output overlaps in the case of a long field: we don't want the low move
5236 // to overwrite the object's location. Likewise, in the case of
5237 // an object field get with read barriers enabled, we do not want
5238 // the move to overwrite the object's location, as we need it to emit
5239 // the read barrier.
5240 locations->SetOut(
5241 Location::RequiresRegister(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005242 (object_field_get_with_read_barrier || instruction->GetType() == DataType::Type::kInt64) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00005243 Location::kOutputOverlap :
5244 Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005245 }
Calin Juravle52c48962014-12-16 17:02:57 +00005246
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005247 if (field_info.IsVolatile() && (field_info.GetFieldType() == DataType::Type::kInt64)) {
Calin Juravle52c48962014-12-16 17:02:57 +00005248 // Long values can be loaded atomically into an XMM using movsd.
Roland Levillain7c1559a2015-12-15 10:55:36 +00005249 // So we use an XMM register as a temp to achieve atomicity (first
5250 // load the temp into the XMM and then copy the XMM into the
5251 // output, 32 bits at a time).
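    // A sketch of the sequence emitted in the kInt64 case of HandleFieldGet below:
    //   movsd xmm_temp, [base + offset]   // one atomic 64-bit load
    //   movd  out_lo, xmm_temp
    //   psrlq xmm_temp, 32
    //   movd  out_hi, xmm_temp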
Calin Juravle52c48962014-12-16 17:02:57 +00005252 locations->AddTemp(Location::RequiresFpuRegister());
5253 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005254}
5255
Calin Juravle52c48962014-12-16 17:02:57 +00005256void InstructionCodeGeneratorX86::HandleFieldGet(HInstruction* instruction,
5257 const FieldInfo& field_info) {
5258 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005259
Calin Juravle52c48962014-12-16 17:02:57 +00005260 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005261 Location base_loc = locations->InAt(0);
5262 Register base = base_loc.AsRegister<Register>();
Calin Juravle52c48962014-12-16 17:02:57 +00005263 Location out = locations->Out();
5264 bool is_volatile = field_info.IsVolatile();
Vladimir Marko61b92282017-10-11 13:23:17 +01005265 DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
5266 DataType::Type load_type = instruction->GetType();
Calin Juravle52c48962014-12-16 17:02:57 +00005267 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
5268
Vladimir Marko61b92282017-10-11 13:23:17 +01005269 switch (load_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005270 case DataType::Type::kBool:
5271 case DataType::Type::kUint8: {
Calin Juravle52c48962014-12-16 17:02:57 +00005272 __ movzxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005273 break;
5274 }
5275
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005276 case DataType::Type::kInt8: {
Calin Juravle52c48962014-12-16 17:02:57 +00005277 __ movsxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005278 break;
5279 }
5280
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005281 case DataType::Type::kUint16: {
5282 __ movzxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005283 break;
5284 }
5285
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005286 case DataType::Type::kInt16: {
5287 __ movsxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005288 break;
5289 }
5290
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005291 case DataType::Type::kInt32:
Calin Juravle52c48962014-12-16 17:02:57 +00005292 __ movl(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005293 break;
Roland Levillain7c1559a2015-12-15 10:55:36 +00005294
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005295 case DataType::Type::kReference: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005296 // /* HeapReference<Object> */ out = *(base + offset)
5297 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005298 // Note that a potential implicit null check is handled in this
5299 // CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier call.
5300 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08005301 instruction, out, base, offset, /* needs_null_check= */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005302 if (is_volatile) {
5303 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5304 }
5305 } else {
5306 __ movl(out.AsRegister<Register>(), Address(base, offset));
5307 codegen_->MaybeRecordImplicitNullCheck(instruction);
5308 if (is_volatile) {
5309 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5310 }
5311 // If read barriers are enabled, emit read barriers other than
5312 // Baker's using a slow path (and also unpoison the loaded
5313 // reference, if heap poisoning is enabled).
5314 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
5315 }
5316 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005317 }
5318
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005319 case DataType::Type::kInt64: {
Calin Juravle52c48962014-12-16 17:02:57 +00005320 if (is_volatile) {
5321 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
5322 __ movsd(temp, Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005323 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005324 __ movd(out.AsRegisterPairLow<Register>(), temp);
5325 __ psrlq(temp, Immediate(32));
5326 __ movd(out.AsRegisterPairHigh<Register>(), temp);
5327 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005328 DCHECK_NE(base, out.AsRegisterPairLow<Register>());
Calin Juravle52c48962014-12-16 17:02:57 +00005329 __ movl(out.AsRegisterPairLow<Register>(), Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005330 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005331 __ movl(out.AsRegisterPairHigh<Register>(), Address(base, kX86WordSize + offset));
5332 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005333 break;
5334 }
5335
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005336 case DataType::Type::kFloat32: {
Calin Juravle52c48962014-12-16 17:02:57 +00005337 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005338 break;
5339 }
5340
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005341 case DataType::Type::kFloat64: {
Calin Juravle52c48962014-12-16 17:02:57 +00005342 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005343 break;
5344 }
5345
Aart Bik66c158e2018-01-31 12:55:04 -08005346 case DataType::Type::kUint32:
5347 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005348 case DataType::Type::kVoid:
Vladimir Marko61b92282017-10-11 13:23:17 +01005349 LOG(FATAL) << "Unreachable type " << load_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005350 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005351 }
Calin Juravle52c48962014-12-16 17:02:57 +00005352
Vladimir Marko61b92282017-10-11 13:23:17 +01005353 if (load_type == DataType::Type::kReference || load_type == DataType::Type::kInt64) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005354 // Potential implicit null checks, in the case of reference or
5355 // long fields, are handled in the previous switch statement.
5356 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005357 codegen_->MaybeRecordImplicitNullCheck(instruction);
5358 }
5359
Calin Juravle52c48962014-12-16 17:02:57 +00005360 if (is_volatile) {
Vladimir Marko61b92282017-10-11 13:23:17 +01005361 if (load_type == DataType::Type::kReference) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005362 // Memory barriers, in the case of references, are also handled
5363 // in the previous switch statement.
5364 } else {
5365 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5366 }
Roland Levillain4d027112015-07-01 15:41:14 +01005367 }
Calin Juravle52c48962014-12-16 17:02:57 +00005368}
5369
5370void LocationsBuilderX86::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
5371 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5372
5373 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005374 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00005375 locations->SetInAt(0, Location::RequiresRegister());
5376 bool is_volatile = field_info.IsVolatile();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005377 DataType::Type field_type = field_info.GetFieldType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005378 bool is_byte_type = DataType::Size(field_type) == 1u;
Calin Juravle52c48962014-12-16 17:02:57 +00005379
5380 // The register allocator does not support multiple
5381 // inputs that die at entry with one in a specific register.
5382 if (is_byte_type) {
5383 // Ensure the value is in a byte register.
5384 locations->SetInAt(1, Location::RegisterLocation(EAX));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005385 } else if (DataType::IsFloatingPointType(field_type)) {
5386 if (is_volatile && field_type == DataType::Type::kFloat64) {
Mark Mendell81489372015-11-04 11:30:41 -05005387 // In order to satisfy the semantics of volatile, this must be a single instruction store.
5388 locations->SetInAt(1, Location::RequiresFpuRegister());
5389 } else {
5390 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
5391 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005392 } else if (is_volatile && field_type == DataType::Type::kInt64) {
Mark Mendell81489372015-11-04 11:30:41 -05005393 // In order to satisfy the semantics of volatile, this must be a single instruction store.
Calin Juravle52c48962014-12-16 17:02:57 +00005394 locations->SetInAt(1, Location::RequiresRegister());
Mark Mendell81489372015-11-04 11:30:41 -05005395
Calin Juravle52c48962014-12-16 17:02:57 +00005396 // A 64-bit value can be atomically written to an address with movsd and an XMM register.
5397 // We need two XMM registers because there's no easier way to (bit) copy a register pair
5398 // into a single XMM register (we copy each pair part into the XMMs and then interleave them).
5399 // NB: We could make the register allocator understand fp_reg <-> core_reg moves, but given the
5400 // isolated cases where we need this, it isn't worth adding the extra complexity.
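    // A sketch of the store sequence emitted in the kInt64 case of HandleFieldSet below:
    //   movd      xmm_temp1, value_lo
    //   movd      xmm_temp2, value_hi
    //   punpckldq xmm_temp1, xmm_temp2        // xmm_temp1 = value_hi:value_lo
    //   movsd     [base + offset], xmm_temp1  // one atomic 64-bit store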
5401 locations->AddTemp(Location::RequiresFpuRegister());
5402 locations->AddTemp(Location::RequiresFpuRegister());
Mark Mendell81489372015-11-04 11:30:41 -05005403 } else {
5404 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5405
5406 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
5407 // Temporary registers for the write barrier.
5408 locations->AddTemp(Location::RequiresRegister()); // May be used for reference poisoning too.
5409 // Ensure the card is in a byte register.
5410 locations->AddTemp(Location::RegisterLocation(ECX));
5411 }
Calin Juravle52c48962014-12-16 17:02:57 +00005412 }
5413}
5414
5415void InstructionCodeGeneratorX86::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005416 const FieldInfo& field_info,
5417 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00005418 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5419
5420 LocationSummary* locations = instruction->GetLocations();
5421 Register base = locations->InAt(0).AsRegister<Register>();
5422 Location value = locations->InAt(1);
5423 bool is_volatile = field_info.IsVolatile();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005424 DataType::Type field_type = field_info.GetFieldType();
Calin Juravle52c48962014-12-16 17:02:57 +00005425 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01005426 bool needs_write_barrier =
5427 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00005428
5429 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005430 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00005431 }
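  // Note: on x86 (TSO), only the trailing kAnyAny barrier emitted at the end of this
  // function typically requires an actual fence instruction; kAnyStore and kLoadAny
  // barriers mostly serve to constrain compile-time reordering. See
  // CodeGeneratorX86::GenerateMemoryBarrier for the details.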
5432
Mark Mendell81489372015-11-04 11:30:41 -05005433 bool maybe_record_implicit_null_check_done = false;
5434
Calin Juravle52c48962014-12-16 17:02:57 +00005435 switch (field_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005436 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005437 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005438 case DataType::Type::kInt8: {
Calin Juravle52c48962014-12-16 17:02:57 +00005439 __ movb(Address(base, offset), value.AsRegister<ByteRegister>());
5440 break;
5441 }
5442
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005443 case DataType::Type::kUint16:
5444 case DataType::Type::kInt16: {
Mark Mendell81489372015-11-04 11:30:41 -05005445 if (value.IsConstant()) {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005446 __ movw(Address(base, offset),
5447 Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Mark Mendell81489372015-11-04 11:30:41 -05005448 } else {
5449 __ movw(Address(base, offset), value.AsRegister<Register>());
5450 }
Calin Juravle52c48962014-12-16 17:02:57 +00005451 break;
5452 }
5453
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005454 case DataType::Type::kInt32:
5455 case DataType::Type::kReference: {
Roland Levillain4d027112015-07-01 15:41:14 +01005456 if (kPoisonHeapReferences && needs_write_barrier) {
5457 // Note that in the case where `value` is a null reference,
5458 // we do not enter this block, as the reference does not
5459 // need poisoning.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005460 DCHECK_EQ(field_type, DataType::Type::kReference);
Roland Levillain4d027112015-07-01 15:41:14 +01005461 Register temp = locations->GetTemp(0).AsRegister<Register>();
5462 __ movl(temp, value.AsRegister<Register>());
5463 __ PoisonHeapReference(temp);
5464 __ movl(Address(base, offset), temp);
Mark Mendell81489372015-11-04 11:30:41 -05005465 } else if (value.IsConstant()) {
5466 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5467 __ movl(Address(base, offset), Immediate(v));
Roland Levillain4d027112015-07-01 15:41:14 +01005468 } else {
Nicolas Geoffray03971632016-03-17 10:44:24 +00005469 DCHECK(value.IsRegister()) << value;
Roland Levillain4d027112015-07-01 15:41:14 +01005470 __ movl(Address(base, offset), value.AsRegister<Register>());
5471 }
Calin Juravle52c48962014-12-16 17:02:57 +00005472 break;
5473 }
5474
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005475 case DataType::Type::kInt64: {
Calin Juravle52c48962014-12-16 17:02:57 +00005476 if (is_volatile) {
5477 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
5478 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
5479 __ movd(temp1, value.AsRegisterPairLow<Register>());
5480 __ movd(temp2, value.AsRegisterPairHigh<Register>());
5481 __ punpckldq(temp1, temp2);
5482 __ movsd(Address(base, offset), temp1);
Calin Juravle77520bc2015-01-12 18:45:46 +00005483 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell81489372015-11-04 11:30:41 -05005484 } else if (value.IsConstant()) {
5485 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
5486 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
5487 codegen_->MaybeRecordImplicitNullCheck(instruction);
5488 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
Calin Juravle52c48962014-12-16 17:02:57 +00005489 } else {
5490 __ movl(Address(base, offset), value.AsRegisterPairLow<Register>());
Calin Juravle77520bc2015-01-12 18:45:46 +00005491 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005492 __ movl(Address(base, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
5493 }
Mark Mendell81489372015-11-04 11:30:41 -05005494 maybe_record_implicit_null_check_done = true;
Calin Juravle52c48962014-12-16 17:02:57 +00005495 break;
5496 }
5497
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005498 case DataType::Type::kFloat32: {
Mark Mendell81489372015-11-04 11:30:41 -05005499 if (value.IsConstant()) {
5500 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5501 __ movl(Address(base, offset), Immediate(v));
5502 } else {
5503 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5504 }
Calin Juravle52c48962014-12-16 17:02:57 +00005505 break;
5506 }
5507
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005508 case DataType::Type::kFloat64: {
Mark Mendell81489372015-11-04 11:30:41 -05005509 if (value.IsConstant()) {
5510 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
5511 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
5512 codegen_->MaybeRecordImplicitNullCheck(instruction);
5513 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
5514 maybe_record_implicit_null_check_done = true;
5515 } else {
5516 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5517 }
Calin Juravle52c48962014-12-16 17:02:57 +00005518 break;
5519 }
5520
Aart Bik66c158e2018-01-31 12:55:04 -08005521 case DataType::Type::kUint32:
5522 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005523 case DataType::Type::kVoid:
Calin Juravle52c48962014-12-16 17:02:57 +00005524 LOG(FATAL) << "Unreachable type " << field_type;
5525 UNREACHABLE();
5526 }
5527
Mark Mendell81489372015-11-04 11:30:41 -05005528 if (!maybe_record_implicit_null_check_done) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005529 codegen_->MaybeRecordImplicitNullCheck(instruction);
5530 }
5531
Roland Levillain4d027112015-07-01 15:41:14 +01005532 if (needs_write_barrier) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005533 Register temp = locations->GetTemp(0).AsRegister<Register>();
5534 Register card = locations->GetTemp(1).AsRegister<Register>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005535 codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00005536 }
5537
Calin Juravle52c48962014-12-16 17:02:57 +00005538 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005539 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00005540 }
5541}
5542
5543void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5544 HandleFieldGet(instruction, instruction->GetFieldInfo());
5545}
5546
5547void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5548 HandleFieldGet(instruction, instruction->GetFieldInfo());
5549}
5550
5551void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5552 HandleFieldSet(instruction, instruction->GetFieldInfo());
5553}
5554
5555void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005556 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005557}
5558
5559void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5560 HandleFieldSet(instruction, instruction->GetFieldInfo());
5561}
5562
5563void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005564 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005565}
5566
5567void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5568 HandleFieldGet(instruction, instruction->GetFieldInfo());
5569}
5570
5571void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5572 HandleFieldGet(instruction, instruction->GetFieldInfo());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005573}
5574
Vladimir Marko552a1342017-10-31 10:56:47 +00005575void LocationsBuilderX86::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5576 codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(EAX));
5577}
5578
5579void InstructionCodeGeneratorX86::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5580 __ movl(EAX, Immediate(instruction->GetFormat()->GetValue()));
5581 codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
5582}
5583
Calin Juravlee460d1d2015-09-29 04:52:17 +01005584void LocationsBuilderX86::VisitUnresolvedInstanceFieldGet(
5585 HUnresolvedInstanceFieldGet* instruction) {
5586 FieldAccessCallingConventionX86 calling_convention;
5587 codegen_->CreateUnresolvedFieldLocationSummary(
5588 instruction, instruction->GetFieldType(), calling_convention);
5589}
5590
5591void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldGet(
5592 HUnresolvedInstanceFieldGet* instruction) {
5593 FieldAccessCallingConventionX86 calling_convention;
5594 codegen_->GenerateUnresolvedFieldAccess(instruction,
5595 instruction->GetFieldType(),
5596 instruction->GetFieldIndex(),
5597 instruction->GetDexPc(),
5598 calling_convention);
5599}
5600
5601void LocationsBuilderX86::VisitUnresolvedInstanceFieldSet(
5602 HUnresolvedInstanceFieldSet* instruction) {
5603 FieldAccessCallingConventionX86 calling_convention;
5604 codegen_->CreateUnresolvedFieldLocationSummary(
5605 instruction, instruction->GetFieldType(), calling_convention);
5606}
5607
5608void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldSet(
5609 HUnresolvedInstanceFieldSet* instruction) {
5610 FieldAccessCallingConventionX86 calling_convention;
5611 codegen_->GenerateUnresolvedFieldAccess(instruction,
5612 instruction->GetFieldType(),
5613 instruction->GetFieldIndex(),
5614 instruction->GetDexPc(),
5615 calling_convention);
5616}
5617
5618void LocationsBuilderX86::VisitUnresolvedStaticFieldGet(
5619 HUnresolvedStaticFieldGet* instruction) {
5620 FieldAccessCallingConventionX86 calling_convention;
5621 codegen_->CreateUnresolvedFieldLocationSummary(
5622 instruction, instruction->GetFieldType(), calling_convention);
5623}
5624
5625void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldGet(
5626 HUnresolvedStaticFieldGet* instruction) {
5627 FieldAccessCallingConventionX86 calling_convention;
5628 codegen_->GenerateUnresolvedFieldAccess(instruction,
5629 instruction->GetFieldType(),
5630 instruction->GetFieldIndex(),
5631 instruction->GetDexPc(),
5632 calling_convention);
5633}
5634
5635void LocationsBuilderX86::VisitUnresolvedStaticFieldSet(
5636 HUnresolvedStaticFieldSet* instruction) {
5637 FieldAccessCallingConventionX86 calling_convention;
5638 codegen_->CreateUnresolvedFieldLocationSummary(
5639 instruction, instruction->GetFieldType(), calling_convention);
5640}
5641
5642void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldSet(
5643 HUnresolvedStaticFieldSet* instruction) {
5644 FieldAccessCallingConventionX86 calling_convention;
5645 codegen_->GenerateUnresolvedFieldAccess(instruction,
5646 instruction->GetFieldType(),
5647 instruction->GetFieldIndex(),
5648 instruction->GetDexPc(),
5649 calling_convention);
5650}
5651
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005652void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005653 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5654 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5655 ? Location::RequiresRegister()
5656 : Location::Any();
5657 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005658}
5659
Calin Juravle2ae48182016-03-16 14:05:09 +00005660void CodeGeneratorX86::GenerateImplicitNullCheck(HNullCheck* instruction) {
5661 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005662 return;
5663 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005664 LocationSummary* locations = instruction->GetLocations();
5665 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005666
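  // The implicit null check is a load from `obj` with zero displacement: if `obj` is
  // null the load faults, and the fault handler turns the signal into a
  // NullPointerException using the PC recorded below. `testl` reads memory without
  // modifying EAX or any other general-purpose register (only EFLAGS).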
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005667 __ testl(EAX, Address(obj.AsRegister<Register>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00005668 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005669}
5670
Calin Juravle2ae48182016-03-16 14:05:09 +00005671void CodeGeneratorX86::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005672 SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005673 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005674
5675 LocationSummary* locations = instruction->GetLocations();
5676 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005677
5678 if (obj.IsRegister()) {
Mark Mendell42514f62015-03-31 11:34:22 -04005679 __ testl(obj.AsRegister<Register>(), obj.AsRegister<Register>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005680 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005681 __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005682 } else {
5683 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00005684 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005685 __ jmp(slow_path->GetEntryLabel());
5686 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005687 }
5688 __ j(kEqual, slow_path->GetEntryLabel());
5689}
5690
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005691void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005692 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005693}
5694
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005695void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005696 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005697 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005698 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005699 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5700 object_array_get_with_read_barrier
5701 ? LocationSummary::kCallOnSlowPath
5702 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005703 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005704 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005705 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005706 locations->SetInAt(0, Location::RequiresRegister());
5707 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005708 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005709 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5710 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005711 // The output overlaps in the case of a long element: we don't want the low move
5712 // to overwrite the array's location. Likewise, in the case of an
5713 // object array get with read barriers enabled, we do not want the
5714 // move to overwrite the array's location, as we need it to emit
5715 // the read barrier.
5716 locations->SetOut(
5717 Location::RequiresRegister(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005718 (instruction->GetType() == DataType::Type::kInt64 || object_array_get_with_read_barrier)
5719 ? Location::kOutputOverlap
5720 : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005721 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005722}
5723
5724void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
5725 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005726 Location obj_loc = locations->InAt(0);
5727 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005728 Location index = locations->InAt(1);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005729 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01005730 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005731
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005732 DataType::Type type = instruction->GetType();
Calin Juravle77520bc2015-01-12 18:45:46 +00005733 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005734 case DataType::Type::kBool:
5735 case DataType::Type::kUint8: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005736 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005737 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005738 break;
5739 }
5740
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005741 case DataType::Type::kInt8: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005742 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005743 __ movsxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005744 break;
5745 }
5746
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005747 case DataType::Type::kUint16: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005748 Register out = out_loc.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07005749 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5750 // Branch into the compressed or uncompressed case based on the string's compression flag.
5751 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
5752 NearLabel done, not_compressed;
Vladimir Marko3c89d422017-02-17 11:30:23 +00005753 __ testb(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005754 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005755 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
5756 "Expecting 0=compressed, 1=uncompressed");
5757 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07005758 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
5759 __ jmp(&done);
5760 __ Bind(&not_compressed);
5761 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5762 __ Bind(&done);
5763 } else {
5764 // Common case: a regular char[] access, or String.charAt when the string compression
5765 // feature is turned off.
5766 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5767 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005768 break;
5769 }
5770
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005771 case DataType::Type::kInt16: {
5772 Register out = out_loc.AsRegister<Register>();
5773 __ movsxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5774 break;
5775 }
5776
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005777 case DataType::Type::kInt32: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005778 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005779 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005780 break;
5781 }
5782
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005783 case DataType::Type::kReference: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005784 static_assert(
5785 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5786 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00005787 // /* HeapReference<Object> */ out =
5788 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
5789 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005790 // Note that a potential implicit null check is handled in this
5791 // CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier call.
5792 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08005793 instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005794 } else {
5795 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005796 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
5797 codegen_->MaybeRecordImplicitNullCheck(instruction);
5798 // If read barriers are enabled, emit read barriers other than
5799 // Baker's using a slow path (and also unpoison the loaded
5800 // reference, if heap poisoning is enabled).
Roland Levillain7c1559a2015-12-15 10:55:36 +00005801 if (index.IsConstant()) {
5802 uint32_t offset =
5803 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain7c1559a2015-12-15 10:55:36 +00005804 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
5805 } else {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005806 codegen_->MaybeGenerateReadBarrierSlow(
5807 instruction, out_loc, out_loc, obj_loc, data_offset, index);
5808 }
5809 }
5810 break;
5811 }
5812
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005813 case DataType::Type::kInt64: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005814 DCHECK_NE(obj, out_loc.AsRegisterPairLow<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005815 __ movl(out_loc.AsRegisterPairLow<Register>(),
5816 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
5817 codegen_->MaybeRecordImplicitNullCheck(instruction);
5818 __ movl(out_loc.AsRegisterPairHigh<Register>(),
5819 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset + kX86WordSize));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005820 break;
5821 }
5822
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005823 case DataType::Type::kFloat32: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005824 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005825 __ movss(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005826 break;
5827 }
5828
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005829 case DataType::Type::kFloat64: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005830 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005831 __ movsd(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005832 break;
5833 }
5834
Aart Bik66c158e2018-01-31 12:55:04 -08005835 case DataType::Type::kUint32:
5836 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005837 case DataType::Type::kVoid:
Calin Juravle77520bc2015-01-12 18:45:46 +00005838 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005839 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005840 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005841
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005842 if (type == DataType::Type::kReference || type == DataType::Type::kInt64) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005843 // Potential implicit null checks, in the case of reference or
5844 // long arrays, are handled in the previous switch statement.
5845 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005846 codegen_->MaybeRecordImplicitNullCheck(instruction);
5847 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005848}
5849
5850void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005851 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005852
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005853 bool needs_write_barrier =
5854 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005855 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005856
Vladimir Markoca6fff82017-10-03 14:49:14 +01005857 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffray39468442014-09-02 15:17:15 +01005858 instruction,
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005859 needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005860
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005861 bool is_byte_type = DataType::Size(value_type) == 1u;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005862 // We need the inputs to be different from the output in the case of a long operation.
5863 // In case of a byte operation, the register allocator does not support multiple
5864 // inputs that die at entry with one in a specific register.
5865 locations->SetInAt(0, Location::RequiresRegister());
5866 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5867 if (is_byte_type) {
5868 // Ensure the value is in a byte register.
5869 locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005870 } else if (DataType::IsFloatingPointType(value_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05005871 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005872 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005873 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5874 }
5875 if (needs_write_barrier) {
5876 // Temporary registers for the write barrier.
5877 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
5878 // Ensure the card is in a byte register.
Roland Levillain4f6b0b52015-11-23 19:29:22 +00005879 locations->AddTemp(Location::RegisterLocation(ECX));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005880 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005881}
5882
5883void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
5884 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005885 Location array_loc = locations->InAt(0);
5886 Register array = array_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005887 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005888 Location value = locations->InAt(2);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005889 DataType::Type value_type = instruction->GetComponentType();
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005890 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005891 bool needs_write_barrier =
5892 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005893
5894 switch (value_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005895 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005896 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005897 case DataType::Type::kInt8: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005898 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005899 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005900 if (value.IsRegister()) {
5901 __ movb(address, value.AsRegister<ByteRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005902 } else {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005903 __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005904 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005905 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005906 break;
5907 }
5908
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005909 case DataType::Type::kUint16:
5910 case DataType::Type::kInt16: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005911 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005912 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005913 if (value.IsRegister()) {
5914 __ movw(address, value.AsRegister<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005915 } else {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005916 __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005917 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005918 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005919 break;
5920 }
5921
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005922 case DataType::Type::kReference: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005923 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005924 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005925
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005926 if (!value.IsRegister()) {
5927 // Just setting null.
5928 DCHECK(instruction->InputAt(2)->IsNullConstant());
5929 DCHECK(value.IsConstant()) << value;
5930 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00005931 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005932 DCHECK(!needs_write_barrier);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005933 DCHECK(!needs_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005934 break;
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005935 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005936
5937 DCHECK(needs_write_barrier);
5938 Register register_value = value.AsRegister<Register>();
Roland Levillain16d9f942016-08-25 17:27:56 +01005939 Location temp_loc = locations->GetTemp(0);
5940 Register temp = temp_loc.AsRegister<Register>();
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005941
5942 bool can_value_be_null = instruction->GetValueCanBeNull();
5943 NearLabel do_store;
5944 if (can_value_be_null) {
5945 __ testl(register_value, register_value);
5946 __ j(kEqual, &do_store);
5947 }
5948
5949 SlowPathCode* slow_path = nullptr;
5950 if (needs_type_check) {
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005951 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005952 codegen_->AddSlowPath(slow_path);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005953
5954 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5955 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5956 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005957
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005958 // Note that when Baker read barriers are enabled, the type
5959 // checks are performed without read barriers. This is fine,
5960 // even in the case where a class object is in the from-space
5961 // after the flip, as a comparison involving such a type would
5962 // not produce a false positive; it may of course produce a
5963 // false negative, in which case we would take the ArraySet
5964 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01005965
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005966 // /* HeapReference<Class> */ temp = array->klass_
5967 __ movl(temp, Address(array, class_offset));
5968 codegen_->MaybeRecordImplicitNullCheck(instruction);
5969 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01005970
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005971 // /* HeapReference<Class> */ temp = temp->component_type_
5972 __ movl(temp, Address(temp, component_offset));
5973 // If heap poisoning is enabled, no need to unpoison `temp`
5974 // nor the object reference in `register_value->klass`, as
5975 // we are comparing two poisoned references.
5976 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01005977
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005978 if (instruction->StaticTypeOfArrayIsObjectArray()) {
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005979 NearLabel do_put;
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005980 __ j(kEqual, &do_put);
5981 // If heap poisoning is enabled, the `temp` reference has
5982 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005983 __ MaybeUnpoisonHeapReference(temp);
5984
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005985 // If heap poisoning is enabled, no need to unpoison the
5986 // heap reference loaded below, as it is only used for a
5987 // comparison with null.
5988 __ cmpl(Address(temp, super_offset), Immediate(0));
5989 __ j(kNotEqual, slow_path->GetEntryLabel());
5990 __ Bind(&do_put);
5991 } else {
5992 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005993 }
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005994 }
5995
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005996 Register card = locations->GetTemp(1).AsRegister<Register>();
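      // `value_can_be_null` is false here because a null value branches directly to
      // `do_store` above, skipping both the type check and this card mark.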
5997 codegen_->MarkGCCard(
5998 temp, card, array, value.AsRegister<Register>(), /* value_can_be_null= */ false);
5999
6000 if (can_value_be_null) {
6001 DCHECK(do_store.IsLinked());
6002 __ Bind(&do_store);
6003 }
6004
6005 Register source = register_value;
Vladimir Marko0dda8c82019-05-16 12:47:40 +00006006 if (kPoisonHeapReferences) {
6007 __ movl(temp, register_value);
6008 __ PoisonHeapReference(temp);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00006009 source = temp;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006010 }
6011
Vladimir Marko8fa839c2019-05-16 12:50:47 +00006012 __ movl(address, source);
6013
6014 if (can_value_be_null || !needs_type_check) {
6015 codegen_->MaybeRecordImplicitNullCheck(instruction);
6016 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006017
Vladimir Marko0dda8c82019-05-16 12:47:40 +00006018 if (slow_path != nullptr) {
6019 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006020 }
6021
6022 break;
6023 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006024
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006025 case DataType::Type::kInt32: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006026 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006027 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006028 if (value.IsRegister()) {
6029 __ movl(address, value.AsRegister<Register>());
6030 } else {
6031 DCHECK(value.IsConstant()) << value;
6032 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
6033 __ movl(address, Immediate(v));
6034 }
6035 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006036 break;
6037 }
6038
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006039 case DataType::Type::kInt64: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006040 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006041 if (value.IsRegisterPair()) {
6042 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
6043 value.AsRegisterPairLow<Register>());
6044 codegen_->MaybeRecordImplicitNullCheck(instruction);
6045 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
6046 value.AsRegisterPairHigh<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006047 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006048 DCHECK(value.IsConstant());
6049 int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
6050 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
6051 Immediate(Low32Bits(val)));
6052 codegen_->MaybeRecordImplicitNullCheck(instruction);
6053 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
6054 Immediate(High32Bits(val)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006055 }
6056 break;
6057 }
6058
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006059 case DataType::Type::kFloat32: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006060 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006061 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendell81489372015-11-04 11:30:41 -05006062 if (value.IsFpuRegister()) {
6063 __ movss(address, value.AsFpuRegister<XmmRegister>());
6064 } else {
6065 DCHECK(value.IsConstant());
6066 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
6067 __ movl(address, Immediate(v));
6068 }
6069 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006070 break;
6071 }
6072
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006073 case DataType::Type::kFloat64: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006074 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006075 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendell81489372015-11-04 11:30:41 -05006076 if (value.IsFpuRegister()) {
6077 __ movsd(address, value.AsFpuRegister<XmmRegister>());
6078 } else {
6079 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006080 Address address_hi =
6081 CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset + kX86WordSize);
Mark Mendell81489372015-11-04 11:30:41 -05006082 int64_t v = bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
6083 __ movl(address, Immediate(Low32Bits(v)));
6084 codegen_->MaybeRecordImplicitNullCheck(instruction);
6085 __ movl(address_hi, Immediate(High32Bits(v)));
6086 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05006087 break;
6088 }
6089
Aart Bik66c158e2018-01-31 12:55:04 -08006090 case DataType::Type::kUint32:
6091 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006092 case DataType::Type::kVoid:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006093 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07006094 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006095 }
6096}
6097
6098void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006099 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01006100 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04006101 if (!instruction->IsEmittedAtUseSite()) {
6102 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6103 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006104}
6105
6106void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04006107 if (instruction->IsEmittedAtUseSite()) {
6108 return;
6109 }
6110
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006111 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01006112 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00006113 Register obj = locations->InAt(0).AsRegister<Register>();
6114 Register out = locations->Out().AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006115 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00006116 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07006117  // Shift out the compression flag in case the array is a String's char array.
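  // With string compression, the count field is expected to hold (length << 1) with the
  // compression flag in the least significant bit, so a single logical shift right by one
  // recovers the character count.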
6118 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006119 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07006120 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006121}
6122
6123void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006124 RegisterSet caller_saves = RegisterSet::Empty();
6125 InvokeRuntimeCallingConvention calling_convention;
6126 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6127 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
6128 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05006129 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04006130 HInstruction* length = instruction->InputAt(1);
6131 if (!length->IsEmittedAtUseSite()) {
6132 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
6133 }
jessicahandojo4877b792016-09-08 19:49:13 -07006134  // Need a temp register to load and decode the string's length.
6135 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
6136 locations->AddTemp(Location::RequiresRegister());
6137 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006138}
6139
6140void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
jessicahandojo4877b792016-09-08 19:49:13 -07006141 const bool is_string_compressed_char_at =
6142 mirror::kUseStringCompression && instruction->IsStringCharAt();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006143 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05006144 Location index_loc = locations->InAt(0);
6145 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006146 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006147 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006148
Mark Mendell99dbd682015-04-22 16:18:52 -04006149 if (length_loc.IsConstant()) {
6150 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
6151 if (index_loc.IsConstant()) {
6152      // BCE will remove the bounds check if we are guaranteed to pass.
6153 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
6154 if (index < 0 || index >= length) {
6155 codegen_->AddSlowPath(slow_path);
6156 __ jmp(slow_path->GetEntryLabel());
6157 } else {
6158 // Some optimization after BCE may have generated this, and we should not
6159 // generate a bounds check if it is a valid range.
6160 }
6161 return;
6162 }
6163
6164    // The constant length must be the immediate operand of the compare, so reverse the jump condition.
6165 Register index_reg = index_loc.AsRegister<Register>();
6166 __ cmpl(index_reg, Immediate(length));
6167 codegen_->AddSlowPath(slow_path);
6168 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05006169 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04006170 HInstruction* array_length = instruction->InputAt(1);
6171 if (array_length->IsEmittedAtUseSite()) {
6172 // Address the length field in the array.
6173 DCHECK(array_length->IsArrayLength());
6174 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
6175 Location array_loc = array_length->GetLocations()->InAt(0);
6176 Address array_len(array_loc.AsRegister<Register>(), len_offset);
jessicahandojo4877b792016-09-08 19:49:13 -07006177 if (is_string_compressed_char_at) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006178 // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
6179 // the string compression flag) with the in-memory length and avoid the temporary.
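        // One possible shape for that TODO (an untested sketch, not what is emitted today):
        //   int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
        //   __ cmpl(array_len, Immediate((index << 1) + 1));
        //   codegen_->MaybeRecordImplicitNullCheck(array_length);
        // and then fall through to the kBelowEqual jump below; the `+ 1` compensates for the
        // compression flag kept in the least significant bit, so the bound is correct for both
        // compressed and uncompressed strings.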
jessicahandojo4877b792016-09-08 19:49:13 -07006180 Register length_reg = locations->GetTemp(0).AsRegister<Register>();
6181 __ movl(length_reg, array_len);
6182 codegen_->MaybeRecordImplicitNullCheck(array_length);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006183 __ shrl(length_reg, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07006184 codegen_->GenerateIntCompare(length_reg, index_loc);
Mark Mendellee8d9712016-07-12 11:13:15 -04006185 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006186        // Checking bounds for the general case:
6187        // a char array, or a String's array when string compression is disabled.
6188 if (index_loc.IsConstant()) {
6189 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
6190 __ cmpl(array_len, Immediate(value));
6191 } else {
6192 __ cmpl(array_len, index_loc.AsRegister<Register>());
6193 }
6194 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendellee8d9712016-07-12 11:13:15 -04006195 }
Mark Mendell99dbd682015-04-22 16:18:52 -04006196 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006197 codegen_->GenerateIntCompare(length_loc, index_loc);
Mark Mendell99dbd682015-04-22 16:18:52 -04006198 }
6199 codegen_->AddSlowPath(slow_path);
6200 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05006201 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006202}
6203
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006204void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006205 LOG(FATAL) << "Unreachable";
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01006206}
6207
6208void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01006209 if (instruction->GetNext()->IsSuspendCheck() &&
6210 instruction->GetBlock()->GetLoopInformation() != nullptr) {
6211 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
6212 // The back edge will generate the suspend check.
6213 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
6214 }
6215
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006216 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
6217}
6218
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006219void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006220 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6221 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07006222 // In suspend check slow path, usually there are no caller-save registers at all.
6223 // If SIMD instructions are present, however, we force spilling all live SIMD
6224 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07006225 locations->SetCustomSlowPathCallerSaves(
6226 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006227}
6228
6229void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006230 HBasicBlock* block = instruction->GetBlock();
6231 if (block->GetLoopInformation() != nullptr) {
6232 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
6233 // The back edge will generate the suspend check.
6234 return;
6235 }
6236 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
6237 // The goto will generate the suspend check.
6238 return;
6239 }
6240 GenerateSuspendCheck(instruction, nullptr);
6241}
6242
6243void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
6244 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006245 SuspendCheckSlowPathX86* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006246 down_cast<SuspendCheckSlowPathX86*>(instruction->GetSlowPath());
6247 if (slow_path == nullptr) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006248 slow_path =
6249 new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86(instruction, successor);
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006250 instruction->SetSlowPath(slow_path);
6251 codegen_->AddSlowPath(slow_path);
6252 if (successor != nullptr) {
6253 DCHECK(successor->IsLoopHeader());
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006254 }
6255 } else {
6256 DCHECK_EQ(slow_path->GetSuccessor(), successor);
6257 }
6258
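  // The 16-bit thread flags are assumed to be non-zero only when a suspend or checkpoint
  // request is pending, so a single compare against zero decides whether the slow path
  // must be taken.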
Andreas Gampe542451c2016-07-26 09:02:02 -07006259 __ fs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86PointerSize>().Int32Value()),
Roland Levillain7c1559a2015-12-15 10:55:36 +00006260 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006261 if (successor == nullptr) {
6262 __ j(kNotEqual, slow_path->GetEntryLabel());
6263 __ Bind(slow_path->GetReturnLabel());
6264 } else {
6265 __ j(kEqual, codegen_->GetLabelOf(successor));
6266 __ jmp(slow_path->GetEntryLabel());
6267 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006268}
6269
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006270X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
6271 return codegen_->GetAssembler();
6272}
6273
Aart Bikcfe50bb2017-12-12 14:54:12 -08006274void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src, int number_of_words) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006275 ScratchRegisterScope ensure_scratch(
6276 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
6277 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
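  // If the scratch register had to be spilled, SpillScratch() pushed it on the stack,
  // so all ESP-relative offsets below must be biased by one word.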
6278 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
Mark Mendell7c8d0092015-01-26 11:21:33 -05006279
Aart Bikcfe50bb2017-12-12 14:54:12 -08006280 // Now that temp register is available (possibly spilled), move blocks of memory.
6281 for (int i = 0; i < number_of_words; i++) {
6282 __ movl(temp_reg, Address(ESP, src + stack_offset));
6283 __ movl(Address(ESP, dst + stack_offset), temp_reg);
6284 stack_offset += kX86WordSize;
6285 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006286}
6287
6288void ParallelMoveResolverX86::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01006289 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006290 Location source = move->GetSource();
6291 Location destination = move->GetDestination();
6292
6293 if (source.IsRegister()) {
6294 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006295 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
David Brazdil74eb1b22015-12-14 11:44:01 +00006296 } else if (destination.IsFpuRegister()) {
6297 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006298 } else {
6299 DCHECK(destination.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006300 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006301 }
David Brazdil74eb1b22015-12-14 11:44:01 +00006302 } else if (source.IsRegisterPair()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006303 size_t elem_size = DataType::Size(DataType::Type::kInt32);
David Brazdil74eb1b22015-12-14 11:44:01 +00006304 // Create stack space for 2 elements.
6305 __ subl(ESP, Immediate(2 * elem_size));
6306 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
6307 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
6308 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
6309 // And remove the temporary stack space we allocated.
6310 __ addl(ESP, Immediate(2 * elem_size));
Mark Mendell7c8d0092015-01-26 11:21:33 -05006311 } else if (source.IsFpuRegister()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00006312 if (destination.IsRegister()) {
6313 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
6314 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006315 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
David Brazdil74eb1b22015-12-14 11:44:01 +00006316 } else if (destination.IsRegisterPair()) {
6317 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
6318 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
6319 __ psrlq(src_reg, Immediate(32));
6320 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006321 } else if (destination.IsStackSlot()) {
6322 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07006323 } else if (destination.IsDoubleStackSlot()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006324 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07006325 } else {
6326 DCHECK(destination.IsSIMDStackSlot());
6327 __ movups(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05006328 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006329 } else if (source.IsStackSlot()) {
6330 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006331 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Mark Mendell7c8d0092015-01-26 11:21:33 -05006332 } else if (destination.IsFpuRegister()) {
6333 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006334 } else {
6335 DCHECK(destination.IsStackSlot());
Aart Bikcfe50bb2017-12-12 14:54:12 -08006336 MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 1);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006337 }
6338 } else if (source.IsDoubleStackSlot()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00006339 if (destination.IsRegisterPair()) {
6340 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
6341 __ movl(destination.AsRegisterPairHigh<Register>(),
6342 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
6343 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006344 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
6345 } else {
6346 DCHECK(destination.IsDoubleStackSlot()) << destination;
Aart Bikcfe50bb2017-12-12 14:54:12 -08006347 MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 2);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006348 }
Aart Bik5576f372017-03-23 16:17:37 -07006349 } else if (source.IsSIMDStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08006350 if (destination.IsFpuRegister()) {
6351 __ movups(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
6352 } else {
6353 DCHECK(destination.IsSIMDStackSlot());
6354 MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 4);
6355 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01006356 } else if (source.IsConstant()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006357 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00006358 if (constant->IsIntConstant() || constant->IsNullConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05006359 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006360 if (destination.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05006361 if (value == 0) {
6362 __ xorl(destination.AsRegister<Register>(), destination.AsRegister<Register>());
6363 } else {
6364 __ movl(destination.AsRegister<Register>(), Immediate(value));
6365 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05006366 } else {
6367 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell09b84632015-02-13 17:48:38 -05006368 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Mark Mendell7c8d0092015-01-26 11:21:33 -05006369 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006370 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006371 float fp_value = constant->AsFloatConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00006372 int32_t value = bit_cast<int32_t, float>(fp_value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006373 Immediate imm(value);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006374 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006375 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
6376 if (value == 0) {
6377 // Easy handling of 0.0.
6378 __ xorps(dest, dest);
6379 } else {
6380 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006381 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
6382 Register temp = static_cast<Register>(ensure_scratch.GetRegister());
6383 __ movl(temp, Immediate(value));
6384 __ movd(dest, temp);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006385 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05006386 } else {
6387 DCHECK(destination.IsStackSlot()) << destination;
6388 __ movl(Address(ESP, destination.GetStackIndex()), imm);
6389 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006390 } else if (constant->IsLongConstant()) {
6391 int64_t value = constant->AsLongConstant()->GetValue();
6392 int32_t low_value = Low32Bits(value);
6393 int32_t high_value = High32Bits(value);
6394 Immediate low(low_value);
6395 Immediate high(high_value);
6396 if (destination.IsDoubleStackSlot()) {
6397 __ movl(Address(ESP, destination.GetStackIndex()), low);
6398 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
6399 } else {
6400 __ movl(destination.AsRegisterPairLow<Register>(), low);
6401 __ movl(destination.AsRegisterPairHigh<Register>(), high);
6402 }
6403 } else {
6404 DCHECK(constant->IsDoubleConstant());
6405 double dbl_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00006406 int64_t value = bit_cast<int64_t, double>(dbl_value);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006407 int32_t low_value = Low32Bits(value);
6408 int32_t high_value = High32Bits(value);
6409 Immediate low(low_value);
6410 Immediate high(high_value);
6411 if (destination.IsFpuRegister()) {
6412 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
6413 if (value == 0) {
6414 // Easy handling of 0.0.
6415 __ xorpd(dest, dest);
6416 } else {
6417 __ pushl(high);
6418 __ pushl(low);
6419 __ movsd(dest, Address(ESP, 0));
6420 __ addl(ESP, Immediate(8));
6421 }
6422 } else {
6423 DCHECK(destination.IsDoubleStackSlot()) << destination;
6424 __ movl(Address(ESP, destination.GetStackIndex()), low);
6425 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
6426 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01006427 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006428 } else {
Nicolas Geoffray42d1f5f2015-01-16 09:14:18 +00006429 LOG(FATAL) << "Unimplemented move: " << destination << " <- " << source;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006430 }
6431}
6432
Mark Mendella5c19ce2015-04-01 12:51:05 -04006433void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006434 Register suggested_scratch = reg == EAX ? EBX : EAX;
6435 ScratchRegisterScope ensure_scratch(
6436 this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
6437
6438 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
6439 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
6440 __ movl(Address(ESP, mem + stack_offset), reg);
6441 __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006442}
6443
Mark Mendell7c8d0092015-01-26 11:21:33 -05006444void ParallelMoveResolverX86::Exchange32(XmmRegister reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006445 ScratchRegisterScope ensure_scratch(
6446 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
6447
6448 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
6449 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
6450 __ movl(temp_reg, Address(ESP, mem + stack_offset));
6451 __ movss(Address(ESP, mem + stack_offset), reg);
6452 __ movd(reg, temp_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006453}
6454
Aart Bikcfe50bb2017-12-12 14:54:12 -08006455void ParallelMoveResolverX86::Exchange128(XmmRegister reg, int mem) {
6456 size_t extra_slot = 4 * kX86WordSize;
6457 __ subl(ESP, Immediate(extra_slot));
6458 __ movups(Address(ESP, 0), XmmRegister(reg));
6459 ExchangeMemory(0, mem + extra_slot, 4);
6460 __ movups(XmmRegister(reg), Address(ESP, 0));
6461 __ addl(ESP, Immediate(extra_slot));
6462}
6463
6464void ParallelMoveResolverX86::ExchangeMemory(int mem1, int mem2, int number_of_words) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006465 ScratchRegisterScope ensure_scratch1(
6466 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006467
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006468 Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
6469 ScratchRegisterScope ensure_scratch2(
6470 this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006471
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006472 int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
6473 stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
Aart Bikcfe50bb2017-12-12 14:54:12 -08006474
6475 // Now that temp registers are available (possibly spilled), exchange blocks of memory.
6476 for (int i = 0; i < number_of_words; i++) {
6477 __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
6478 __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
6479 __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
6480 __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
6481 stack_offset += kX86WordSize;
6482 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006483}
6484
6485void ParallelMoveResolverX86::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01006486 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006487 Location source = move->GetSource();
6488 Location destination = move->GetDestination();
6489
6490 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell90979812015-07-28 16:41:21 -04006491 // Use XOR swap algorithm to avoid serializing XCHG instruction or using a temporary.
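    // After `a ^= b; b ^= a; a ^= b;` the two registers hold each other's original
    // values, so no scratch register is needed; the DCHECK below rules out the
    // degenerate case of swapping a register with itself, which would zero it.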
6492 DCHECK_NE(destination.AsRegister<Register>(), source.AsRegister<Register>());
6493 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
6494 __ xorl(source.AsRegister<Register>(), destination.AsRegister<Register>());
6495 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006496 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006497 Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006498 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006499 Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006500 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08006501 ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 1);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006502 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
6503    // Use XOR swap algorithm to avoid a temporary.
6504 DCHECK_NE(source.reg(), destination.reg());
6505 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
6506 __ xorpd(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
6507 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
6508 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
6509 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
6510 } else if (destination.IsFpuRegister() && source.IsStackSlot()) {
6511 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006512 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
6513 // Take advantage of the 16 bytes in the XMM register.
6514 XmmRegister reg = source.AsFpuRegister<XmmRegister>();
6515 Address stack(ESP, destination.GetStackIndex());
6516 // Load the double into the high doubleword.
6517 __ movhpd(reg, stack);
6518
6519 // Store the low double into the destination.
6520 __ movsd(stack, reg);
6521
6522 // Move the high double to the low double.
6523 __ psrldq(reg, Immediate(8));
6524 } else if (destination.IsFpuRegister() && source.IsDoubleStackSlot()) {
6525 // Take advantage of the 16 bytes in the XMM register.
6526 XmmRegister reg = destination.AsFpuRegister<XmmRegister>();
6527 Address stack(ESP, source.GetStackIndex());
6528 // Load the double into the high doubleword.
6529 __ movhpd(reg, stack);
6530
6531 // Store the low double into the destination.
6532 __ movsd(stack, reg);
6533
6534 // Move the high double to the low double.
6535 __ psrldq(reg, Immediate(8));
6536 } else if (destination.IsDoubleStackSlot() && source.IsDoubleStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08006537 ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 2);
6538 } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
6539 ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 4);
6540 } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
6541 Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
6542 } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
6543 Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006544 } else {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006545 LOG(FATAL) << "Unimplemented: source: " << source << ", destination: " << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006546 }
6547}
6548
6549void ParallelMoveResolverX86::SpillScratch(int reg) {
6550 __ pushl(static_cast<Register>(reg));
6551}
6552
6553void ParallelMoveResolverX86::RestoreScratch(int reg) {
6554 __ popl(static_cast<Register>(reg));
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01006555}
6556
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006557HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
6558 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006559 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006560 case HLoadClass::LoadKind::kInvalid:
6561 LOG(FATAL) << "UNREACHABLE";
6562 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006563 case HLoadClass::LoadKind::kReferrersClass:
6564 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006565 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006566 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006567 case HLoadClass::LoadKind::kBssEntry:
Vladimir Marko764d4542017-05-16 10:31:41 +01006568 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006569 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006570 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006571 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006572 DCHECK(Runtime::Current()->UseJitCompilation());
6573 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006574 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006575 break;
6576 }
6577 return desired_class_load_kind;
6578}
6579
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006580void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00006581 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006582 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006583 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00006584 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006585 cls,
6586 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00006587 Location::RegisterLocation(EAX));
Vladimir Markoea4c1262017-02-06 19:59:33 +00006588 DCHECK_EQ(calling_convention.GetRegisterAt(0), EAX);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006589 return;
6590 }
Vladimir Marko41559982017-01-06 14:04:23 +00006591 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006592
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006593 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
6594 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006595 ? LocationSummary::kCallOnSlowPath
6596 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006597 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006598 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006599 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006600 }
6601
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006602 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006603 load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006604 load_kind == HLoadClass::LoadKind::kBootImageRelRo ||
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006605 load_kind == HLoadClass::LoadKind::kBssEntry) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006606 locations->SetInAt(0, Location::RequiresRegister());
6607 }
6608 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006609 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
6610 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6611 // Rely on the type resolution and/or initialization to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006612 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006613 } else {
6614 // For non-Baker read barrier we have a temp-clobbering call.
6615 }
6616 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006617}
6618
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006619Label* CodeGeneratorX86::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006620 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006621 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006622 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006623 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006624 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006625 PatchInfo<Label>* info = &jit_class_patches_.back();
6626 return &info->label;
6627}
6628
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006629// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6630// move.
6631void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00006632 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006633 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00006634 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01006635 return;
6636 }
Vladimir Marko41559982017-01-06 14:04:23 +00006637 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01006638
Vladimir Marko41559982017-01-06 14:04:23 +00006639 LocationSummary* locations = cls->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006640 Location out_loc = locations->Out();
6641 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006642
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006643 bool generate_null_check = false;
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006644 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
6645 ? kWithoutReadBarrier
6646 : kCompilerReadBarrierOption;
Vladimir Marko41559982017-01-06 14:04:23 +00006647 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006648 case HLoadClass::LoadKind::kReferrersClass: {
6649 DCHECK(!cls->CanCallRuntime());
6650 DCHECK(!cls->MustGenerateClinitCheck());
6651 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
6652 Register current_method = locations->InAt(0).AsRegister<Register>();
6653 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006654 cls,
6655 out_loc,
6656 Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
Andreas Gampe3db70682018-12-26 15:12:03 -08006657 /* fixup_label= */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006658 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006659 break;
6660 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006661 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko44ca0752019-07-29 10:18:25 +01006662 DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
6663 codegen_->GetCompilerOptions().IsBootImageExtension());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006664 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006665 Register method_address = locations->InAt(0).AsRegister<Register>();
6666 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006667 codegen_->RecordBootImageTypePatch(cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006668 break;
6669 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006670 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01006671 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6672 Register method_address = locations->InAt(0).AsRegister<Register>();
6673 __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006674 codegen_->RecordBootImageRelRoPatch(cls->InputAt(0)->AsX86ComputeBaseMethodAddress(),
6675 codegen_->GetBootImageOffset(cls));
Vladimir Marko94ec2db2017-09-06 17:21:03 +01006676 break;
6677 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006678 case HLoadClass::LoadKind::kBssEntry: {
6679 Register method_address = locations->InAt(0).AsRegister<Register>();
6680 Address address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6681 Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
6682 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01006683 // No need for memory fence, thanks to the x86 memory model.
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006684 generate_null_check = true;
6685 break;
6686 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006687 case HLoadClass::LoadKind::kJitBootImageAddress: {
6688 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
6689 uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
6690 DCHECK_NE(address, 0u);
6691 __ movl(out, Immediate(address));
6692 break;
6693 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006694 case HLoadClass::LoadKind::kJitTableAddress: {
6695 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6696 Label* fixup_label = codegen_->NewJitRootClassPatch(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006697 cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006698 // /* GcRoot<mirror::Class> */ out = *address
Vladimir Markoea4c1262017-02-06 19:59:33 +00006699 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006700 break;
6701 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006702 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006703 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00006704 LOG(FATAL) << "UNREACHABLE";
6705 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006706 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006707
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006708 if (generate_null_check || cls->MustGenerateClinitCheck()) {
6709 DCHECK(cls->CanCallRuntime());
Vladimir Markoa9f303c2018-07-20 16:43:56 +01006710 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86(cls, cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006711 codegen_->AddSlowPath(slow_path);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006712
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006713 if (generate_null_check) {
6714 __ testl(out, out);
6715 __ j(kEqual, slow_path->GetEntryLabel());
6716 }
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006717
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006718 if (cls->MustGenerateClinitCheck()) {
6719 GenerateClassInitializationCheck(slow_path, out);
6720 } else {
6721 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006722 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006723 }
6724}
6725
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006726void LocationsBuilderX86::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6727 InvokeRuntimeCallingConvention calling_convention;
6728 Location location = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6729 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
6730}
6731
6732void InstructionCodeGeneratorX86::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6733 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
6734}
6735
Orion Hodson18259d72018-04-12 11:18:23 +01006736void LocationsBuilderX86::VisitLoadMethodType(HLoadMethodType* load) {
6737 InvokeRuntimeCallingConvention calling_convention;
6738 Location location = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6739 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
6740}
6741
6742void InstructionCodeGeneratorX86::VisitLoadMethodType(HLoadMethodType* load) {
6743 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
6744}
6745
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006746void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
6747 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006748 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006749 locations->SetInAt(0, Location::RequiresRegister());
6750 if (check->HasUses()) {
6751 locations->SetOut(Location::SameAsFirstInput());
6752 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006753 // Rely on the type initialization to save everything we need.
6754 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006755}
6756
6757void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006758 // We assume the class to not be null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01006759 SlowPathCode* slow_path =
6760 new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86(check->GetLoadClass(), check);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006761 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00006762 GenerateClassInitializationCheck(slow_path,
6763 check->GetLocations()->InAt(0).AsRegister<Register>());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006764}
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006765
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006766void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07006767 SlowPathCode* slow_path, Register class_reg) {
Vladimir Markodc682aa2018-01-04 18:42:57 +00006768 constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
6769 const size_t status_byte_offset =
6770 mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
Vladimir Markobf121912019-06-04 13:49:05 +01006771 constexpr uint32_t shifted_visibly_initialized_value =
6772 enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << (status_lsb_position % kBitsPerByte);
Vladimir Markodc682aa2018-01-04 18:42:57 +00006773
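  // The class status is stored in the most significant bits of the 32-bit status field,
  // above the subtype check bitstring, so an unsigned compare of the single byte holding
  // it against the shifted kVisiblyInitialized value is sufficient: kBelow exactly when
  // the class is not yet visibly initialized.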
Vladimir Markobf121912019-06-04 13:49:05 +01006774 __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_visibly_initialized_value));
Vladimir Marko2c64a832018-01-04 11:31:56 +00006775 __ j(kBelow, slow_path->GetEntryLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006776 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006777}
6778
Vladimir Marko175e7862018-03-27 09:03:13 +00006779void InstructionCodeGeneratorX86::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
6780 Register temp) {
6781 uint32_t path_to_root = check->GetBitstringPathToRoot();
6782 uint32_t mask = check->GetBitstringMask();
6783 DCHECK(IsPowerOfTwo(mask + 1));
6784 size_t mask_bits = WhichPowerOf2(mask + 1);
6785
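  // Both paths below leave the result in the flags: they effectively test whether
  // (the candidate class's bitstring & mask) == path_to_root, and callers are expected
  // to branch on the resulting (in)equality / zero flag.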
6786 if (mask_bits == 16u) {
6787 // Compare the bitstring in memory.
6788 __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
6789 } else {
6790 // /* uint32_t */ temp = temp->status_
6791 __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
6792 // Compare the bitstring bits using SUB.
6793 __ subl(temp, Immediate(path_to_root));
6794 // Shift out bits that do not contribute to the comparison.
6795 __ shll(temp, Immediate(32u - mask_bits));
6796 }
6797}
6798
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006799HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
6800 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006801 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006802 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006803 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00006804 case HLoadString::LoadKind::kBssEntry:
Vladimir Marko764d4542017-05-16 10:31:41 +01006805 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006806 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006807 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006808 case HLoadString::LoadKind::kJitTableAddress:
6809 DCHECK(Runtime::Current()->UseJitCompilation());
6810 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006811 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006812 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006813 }
6814 return desired_string_load_kind;
6815}
6816
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006817void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006818 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006819 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006820 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006821 if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006822 load_kind == HLoadString::LoadKind::kBootImageRelRo ||
Vladimir Markoaad75c62016-10-03 08:46:48 +00006823 load_kind == HLoadString::LoadKind::kBssEntry) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006824 locations->SetInAt(0, Location::RequiresRegister());
6825 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006826 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006827 locations->SetOut(Location::RegisterLocation(EAX));
6828 } else {
6829 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006830 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6831 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00006832        // Rely on the pResolveString entrypoint to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006833 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006834 } else {
6835 // For non-Baker read barrier we have a temp-clobbering call.
6836 }
6837 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006838 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006839}
6840
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006841Label* CodeGeneratorX86::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006842 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006843 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006844 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006845 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006846 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006847 PatchInfo<Label>* info = &jit_string_patches_.back();
6848 return &info->label;
6849}
6850
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006851// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6852// move.
6853void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01006854 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006855 Location out_loc = locations->Out();
6856 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006857
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006858 switch (load->GetLoadKind()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006859 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko44ca0752019-07-29 10:18:25 +01006860 DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
6861 codegen_->GetCompilerOptions().IsBootImageExtension());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006862 Register method_address = locations->InAt(0).AsRegister<Register>();
6863 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006864 codegen_->RecordBootImageStringPatch(load);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006865 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006866 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006867 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006868 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6869 Register method_address = locations->InAt(0).AsRegister<Register>();
6870 __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006871 codegen_->RecordBootImageRelRoPatch(load->InputAt(0)->AsX86ComputeBaseMethodAddress(),
6872 codegen_->GetBootImageOffset(load));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006873 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006874 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00006875 case HLoadString::LoadKind::kBssEntry: {
6876 Register method_address = locations->InAt(0).AsRegister<Register>();
6877 Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6878 Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006879 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006880 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01006881 // No need for memory fence, thanks to the x86 memory model.
Vladimir Marko174b2e22017-10-12 13:34:49 +01006882 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00006883 codegen_->AddSlowPath(slow_path);
6884 __ testl(out, out);
6885 __ j(kEqual, slow_path->GetEntryLabel());
6886 __ Bind(slow_path->GetExitLabel());
6887 return;
6888 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006889 case HLoadString::LoadKind::kJitBootImageAddress: {
6890 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
6891 DCHECK_NE(address, 0u);
6892 __ movl(out, Immediate(address));
6893 return;
6894 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006895 case HLoadString::LoadKind::kJitTableAddress: {
6896 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6897 Label* fixup_label = codegen_->NewJitRootStringPatch(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006898 load->GetDexFile(), load->GetStringIndex(), load->GetString());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006899 // /* GcRoot<mirror::String> */ out = *address
6900 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
6901 return;
6902 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006903 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006904 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006905 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006906
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006907 // TODO: Re-add the compiler code to do string dex cache lookup again.
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006908 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006909 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006910 __ movl(calling_convention.GetRegisterAt(0), Immediate(load->GetStringIndex().index_));
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006911 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6912 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006913}
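
// A rough sketch (not emitted verbatim; offsets and label names here are illustrative only) of
// the instruction sequence produced for the common kBssEntry case above, assuming read barriers
// are disabled:
//
//   movl  out, [method_address + <string .bss entry>]   // PC-relative, patched at link time
//   testl out, out
//   jz    LoadStringSlowPathX86_entry                    // first use: resolve via runtime
//   slow_path_exit:
//
// The other load kinds reduce to a single leal/movl of a patched address or immediate.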
6914
David Brazdilcb1c0552015-08-04 16:22:25 +01006915static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006916 return Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01006917}
6918
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006919void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
6920 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006921 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006922 locations->SetOut(Location::RequiresRegister());
6923}
6924
6925void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01006926 __ fs()->movl(load->GetLocations()->Out().AsRegister<Register>(), GetExceptionTlsAddress());
6927}
6928
6929void LocationsBuilderX86::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006930 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01006931}
6932
6933void InstructionCodeGeneratorX86::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
6934 __ fs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006935}
6936
6937void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006938 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6939 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006940 InvokeRuntimeCallingConvention calling_convention;
6941 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6942}
6943
6944void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006945 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006946 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006947}
6948
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006949// Temp is used for read barrier.
6950static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6951 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006952 !kUseBakerReadBarrier &&
6953 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain7c1559a2015-12-15 10:55:36 +00006954 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006955 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6956 return 1;
6957 }
6958 return 0;
6959}
6960
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006961// Interface case has 2 temps, one for holding the number of interfaces, one for the current
6962// interface pointer; the current interface is compared in memory.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006963// The other checks have one temp for loading the object's class.
6964static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006965 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006966 return 2;
6967 }
6968 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006969}
6970
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006971void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006972 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006973 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01006974 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006975 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006976 case TypeCheckKind::kExactCheck:
6977 case TypeCheckKind::kAbstractClassCheck:
6978 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00006979 case TypeCheckKind::kArrayObjectCheck: {
6980 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
6981 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
6982 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006983 break;
Vladimir Marko87584542017-12-12 17:47:52 +00006984 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006985 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006986 case TypeCheckKind::kUnresolvedCheck:
6987 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006988 call_kind = LocationSummary::kCallOnSlowPath;
6989 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006990 case TypeCheckKind::kBitstringCheck:
6991 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006992 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006993
Vladimir Markoca6fff82017-10-03 14:49:14 +01006994 LocationSummary* locations =
6995 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01006996 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006997 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006998 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006999 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00007000 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
7001 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
7002 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
7003 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
7004 } else {
7005 locations->SetInAt(1, Location::Any());
7006 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007007 // Note that TypeCheckSlowPathX86 uses this "out" register too.
7008 locations->SetOut(Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007009 // When read barriers are enabled, we need a temporary register for some cases.
7010 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007011}
7012
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007013void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007014 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007015 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00007016 Location obj_loc = locations->InAt(0);
7017 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007018 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007019 Location out_loc = locations->Out();
7020 Register out = out_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007021 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
7022 DCHECK_LE(num_temps, 1u);
7023 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007024 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007025 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7026 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7027 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07007028 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007029 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007030
7031 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007032 // Avoid null check if we know obj is not null.
7033 if (instruction->MustDoNullCheck()) {
7034 __ testl(obj, obj);
7035 __ j(kEqual, &zero);
7036 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007037
Roland Levillain7c1559a2015-12-15 10:55:36 +00007038 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007039 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007040 ReadBarrierOption read_barrier_option =
7041 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007042 // /* HeapReference<Class> */ out = obj->klass_
7043 GenerateReferenceLoadTwoRegisters(instruction,
7044 out_loc,
7045 obj_loc,
7046 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007047 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007048 if (cls.IsRegister()) {
7049 __ cmpl(out, cls.AsRegister<Register>());
7050 } else {
7051 DCHECK(cls.IsStackSlot()) << cls;
7052 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7053 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007054
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007055 // Classes must be equal for the instanceof to succeed.
7056 __ j(kNotEqual, &zero);
7057 __ movl(out, Immediate(1));
7058 __ jmp(&done);
7059 break;
7060 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007061
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007062 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007063 ReadBarrierOption read_barrier_option =
7064 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007065 // /* HeapReference<Class> */ out = obj->klass_
7066 GenerateReferenceLoadTwoRegisters(instruction,
7067 out_loc,
7068 obj_loc,
7069 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007070 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007071 // If the class is abstract, we eagerly fetch the super class of the
7072 // object to avoid doing a comparison we know will fail.
7073 NearLabel loop;
7074 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007075 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007076 GenerateReferenceLoadOneRegister(instruction,
7077 out_loc,
7078 super_offset,
7079 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007080 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007081 __ testl(out, out);
7082 // If `out` is null, we use it for the result, and jump to `done`.
7083 __ j(kEqual, &done);
7084 if (cls.IsRegister()) {
7085 __ cmpl(out, cls.AsRegister<Register>());
7086 } else {
7087 DCHECK(cls.IsStackSlot()) << cls;
7088 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7089 }
7090 __ j(kNotEqual, &loop);
7091 __ movl(out, Immediate(1));
7092 if (zero.IsLinked()) {
7093 __ jmp(&done);
7094 }
7095 break;
7096 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007097
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007098 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007099 ReadBarrierOption read_barrier_option =
7100 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007101 // /* HeapReference<Class> */ out = obj->klass_
7102 GenerateReferenceLoadTwoRegisters(instruction,
7103 out_loc,
7104 obj_loc,
7105 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007106 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007107 // Walk over the class hierarchy to find a match.
7108 NearLabel loop, success;
7109 __ Bind(&loop);
7110 if (cls.IsRegister()) {
7111 __ cmpl(out, cls.AsRegister<Register>());
7112 } else {
7113 DCHECK(cls.IsStackSlot()) << cls;
7114 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7115 }
7116 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007117 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007118 GenerateReferenceLoadOneRegister(instruction,
7119 out_loc,
7120 super_offset,
7121 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007122 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007123 __ testl(out, out);
7124 __ j(kNotEqual, &loop);
7125 // If `out` is null, we use it for the result, and jump to `done`.
7126 __ jmp(&done);
7127 __ Bind(&success);
7128 __ movl(out, Immediate(1));
7129 if (zero.IsLinked()) {
7130 __ jmp(&done);
7131 }
7132 break;
7133 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007134
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007135 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007136 ReadBarrierOption read_barrier_option =
7137 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007138 // /* HeapReference<Class> */ out = obj->klass_
7139 GenerateReferenceLoadTwoRegisters(instruction,
7140 out_loc,
7141 obj_loc,
7142 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007143 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007144 // Do an exact check.
7145 NearLabel exact_check;
7146 if (cls.IsRegister()) {
7147 __ cmpl(out, cls.AsRegister<Register>());
7148 } else {
7149 DCHECK(cls.IsStackSlot()) << cls;
7150 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7151 }
7152 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007153 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007154 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007155 GenerateReferenceLoadOneRegister(instruction,
7156 out_loc,
7157 component_offset,
7158 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007159 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007160 __ testl(out, out);
7161 // If `out` is null, we use it for the result, and jump to `done`.
7162 __ j(kEqual, &done);
7163 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
7164 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007165 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007166 __ movl(out, Immediate(1));
7167 __ jmp(&done);
7168 break;
7169 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007170
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007171 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007172 // No read barrier since the slow path will retry upon failure.
7173 // /* HeapReference<Class> */ out = obj->klass_
7174 GenerateReferenceLoadTwoRegisters(instruction,
7175 out_loc,
7176 obj_loc,
7177 class_offset,
7178 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007179 if (cls.IsRegister()) {
7180 __ cmpl(out, cls.AsRegister<Register>());
7181 } else {
7182 DCHECK(cls.IsStackSlot()) << cls;
7183 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7184 }
7185 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007186 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007187 instruction, /* is_fatal= */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007188 codegen_->AddSlowPath(slow_path);
7189 __ j(kNotEqual, slow_path->GetEntryLabel());
7190 __ movl(out, Immediate(1));
7191 if (zero.IsLinked()) {
7192 __ jmp(&done);
7193 }
7194 break;
7195 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007196
Calin Juravle98893e12015-10-02 21:05:03 +01007197 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00007198 case TypeCheckKind::kInterfaceCheck: {
7199 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007200 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00007201 // cases.
7202 //
7203 // We cannot directly call the InstanceofNonTrivial runtime
7204 // entry point without resorting to a type checking slow path
7205 // here (i.e. by calling InvokeRuntime directly), as it would
7206 // require to assign fixed registers for the inputs of this
7207 // HInstanceOf instruction (following the runtime calling
7208 // convention), which might be cluttered by the potential first
7209 // read barrier emission at the beginning of this method.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007210 //
7211 // TODO: Introduce a new runtime entry point taking the object
7212 // to test (instead of its class) as argument, and let it deal
7213 // with the read barrier issues. This will let us refactor this
7214 // case of the `switch` code as it was previously (with a direct
7215 // call to the runtime not using a type checking slow path).
7216 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007217 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007218 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007219 instruction, /* is_fatal= */ false);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007220 codegen_->AddSlowPath(slow_path);
7221 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007222 if (zero.IsLinked()) {
7223 __ jmp(&done);
7224 }
7225 break;
7226 }
Vladimir Marko175e7862018-03-27 09:03:13 +00007227
7228 case TypeCheckKind::kBitstringCheck: {
7229 // /* HeapReference<Class> */ out = obj->klass_
7230 GenerateReferenceLoadTwoRegisters(instruction,
7231 out_loc,
7232 obj_loc,
7233 class_offset,
7234 kWithoutReadBarrier);
7235
7236 GenerateBitstringTypeCheckCompare(instruction, out);
7237 __ j(kNotEqual, &zero);
7238 __ movl(out, Immediate(1));
7239 __ jmp(&done);
7240 break;
7241 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007242 }
7243
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007244 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007245 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007246 __ xorl(out, out);
7247 }
7248
7249 if (done.IsLinked()) {
7250 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007251 }
7252
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007253 if (slow_path != nullptr) {
7254 __ Bind(slow_path->GetExitLabel());
7255 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007256}
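
// For orientation, an illustrative (and hedged) mapping from Java `instanceof` expressions to the
// TypeCheckKind cases handled above; the kind is chosen earlier by the compiler driver, so treat
// these pairings as typical examples rather than a specification:
//
//   obj instanceof String         // final class     -> kExactCheck
//   obj instanceof AbstractList   // abstract class  -> kAbstractClassCheck
//   obj instanceof ArrayList      // regular class   -> kClassHierarchyCheck
//   obj instanceof Object[]       // object array    -> kArrayObjectCheck
//   obj instanceof Runnable       // interface       -> kInterfaceCheck (slow path only here)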
7257
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007258void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007259 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00007260 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01007261 LocationSummary* locations =
7262 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007263 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007264 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
7265 // Require a register for the interface check since there is a loop that compares the class to
7266 // a memory address.
7267 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00007268 } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
7269 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
7270 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
7271 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007272 } else {
7273 locations->SetInAt(1, Location::Any());
7274 }
Vladimir Marko9f8d3122018-04-06 13:47:59 +01007275 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007276 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
7277}
7278
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007279void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007280 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007281 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00007282 Location obj_loc = locations->InAt(0);
7283 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007284 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007285 Location temp_loc = locations->GetTemp(0);
7286 Register temp = temp_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007287 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
7288 DCHECK_GE(num_temps, 1u);
7289 DCHECK_LE(num_temps, 2u);
7290 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
7291 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
7292 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7293 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7294 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
7295 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
7296 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
7297 const uint32_t object_array_data_offset =
7298 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007299
Vladimir Marko87584542017-12-12 17:47:52 +00007300 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007301 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007302 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
7303 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007304 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007305
Roland Levillain0d5a2812015-11-13 10:07:31 +00007306 NearLabel done;
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007307 // Avoid null check if we know obj is not null.
7308 if (instruction->MustDoNullCheck()) {
7309 __ testl(obj, obj);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007310 __ j(kEqual, &done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007311 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007312
Roland Levillain0d5a2812015-11-13 10:07:31 +00007313 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007314 case TypeCheckKind::kExactCheck:
7315 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007316 // /* HeapReference<Class> */ temp = obj->klass_
7317 GenerateReferenceLoadTwoRegisters(instruction,
7318 temp_loc,
7319 obj_loc,
7320 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007321 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007322
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007323 if (cls.IsRegister()) {
7324 __ cmpl(temp, cls.AsRegister<Register>());
7325 } else {
7326 DCHECK(cls.IsStackSlot()) << cls;
7327 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7328 }
7329 // Jump to slow path for throwing the exception or doing a
7330 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007331 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007332 break;
7333 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007334
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007335 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007336 // /* HeapReference<Class> */ temp = obj->klass_
7337 GenerateReferenceLoadTwoRegisters(instruction,
7338 temp_loc,
7339 obj_loc,
7340 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007341 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007342
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007343 // If the class is abstract, we eagerly fetch the super class of the
7344 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007345 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007346 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007347 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007348 GenerateReferenceLoadOneRegister(instruction,
7349 temp_loc,
7350 super_offset,
7351 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007352 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007353
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007354 // If the class reference currently in `temp` is null, jump to the slow path to throw the
7355 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007356 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007357 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00007358
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007359 // Otherwise, compare the classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007360 if (cls.IsRegister()) {
7361 __ cmpl(temp, cls.AsRegister<Register>());
7362 } else {
7363 DCHECK(cls.IsStackSlot()) << cls;
7364 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7365 }
7366 __ j(kNotEqual, &loop);
7367 break;
7368 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007369
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007370 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007371 // /* HeapReference<Class> */ temp = obj->klass_
7372 GenerateReferenceLoadTwoRegisters(instruction,
7373 temp_loc,
7374 obj_loc,
7375 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007376 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007377
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007378 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007379 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007380 __ Bind(&loop);
7381 if (cls.IsRegister()) {
7382 __ cmpl(temp, cls.AsRegister<Register>());
7383 } else {
7384 DCHECK(cls.IsStackSlot()) << cls;
7385 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7386 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007387 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007388
Roland Levillain0d5a2812015-11-13 10:07:31 +00007389 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007390 GenerateReferenceLoadOneRegister(instruction,
7391 temp_loc,
7392 super_offset,
7393 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007394 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007395
7396 // If the class reference currently in `temp` is not null, jump
7397 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007398 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007399 __ j(kNotZero, &loop);
7400 // Otherwise, jump to the slow path to throw the exception.;
Roland Levillain0d5a2812015-11-13 10:07:31 +00007401 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007402 break;
7403 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007404
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007405 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007406 // /* HeapReference<Class> */ temp = obj->klass_
7407 GenerateReferenceLoadTwoRegisters(instruction,
7408 temp_loc,
7409 obj_loc,
7410 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007411 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007412
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007413 // Do an exact check.
7414 if (cls.IsRegister()) {
7415 __ cmpl(temp, cls.AsRegister<Register>());
7416 } else {
7417 DCHECK(cls.IsStackSlot()) << cls;
7418 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7419 }
7420 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007421
7422 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007423 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007424 GenerateReferenceLoadOneRegister(instruction,
7425 temp_loc,
7426 component_offset,
7427 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007428 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007429
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007430 // If the component type is null (i.e. the object not an array), jump to the slow path to
7431 // throw the exception. Otherwise proceed with the check.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007432 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007433 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00007434
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007435 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007436 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007437 break;
7438 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007439
Calin Juravle98893e12015-10-02 21:05:03 +01007440 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007441 // We always go into the type check slow path for the unresolved check case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007442 // We cannot directly call the CheckCast runtime entry point
7443 // without resorting to a type checking slow path here (i.e. by
7444 // calling InvokeRuntime directly), as it would require to
7445 // assign fixed registers for the inputs of this HInstanceOf
7446 // instruction (following the runtime calling convention), which
7447 // might be cluttered by the potential first read barrier
7448 // emission at the beginning of this method.
7449 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007450 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007451
7452 case TypeCheckKind::kInterfaceCheck: {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00007453 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
7454 // We cannot get false positives by doing this.
7455 // /* HeapReference<Class> */ temp = obj->klass_
7456 GenerateReferenceLoadTwoRegisters(instruction,
7457 temp_loc,
7458 obj_loc,
7459 class_offset,
7460 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007461
Vladimir Markoe619f6c2017-12-12 16:00:01 +00007462 // /* HeapReference<Class> */ temp = temp->iftable_
7463 GenerateReferenceLoadTwoRegisters(instruction,
7464 temp_loc,
7465 temp_loc,
7466 iftable_offset,
7467 kWithoutReadBarrier);
7468 // Iftable is never null.
7469 __ movl(maybe_temp2_loc.AsRegister<Register>(), Address(temp, array_length_offset));
7470 // Maybe poison the `cls` for direct comparison with memory.
7471 __ MaybePoisonHeapReference(cls.AsRegister<Register>());
7472 // Loop through the iftable and check if any class matches.
7473 NearLabel start_loop;
7474 __ Bind(&start_loop);
7475 // Need to subtract first to handle the empty array case.
7476 __ subl(maybe_temp2_loc.AsRegister<Register>(), Immediate(2));
7477 __ j(kNegative, type_check_slow_path->GetEntryLabel());
7478 // Go to next interface if the classes do not match.
7479 __ cmpl(cls.AsRegister<Register>(),
7480 CodeGeneratorX86::ArrayAddress(temp,
7481 maybe_temp2_loc,
7482 TIMES_4,
7483 object_array_data_offset));
7484 __ j(kNotEqual, &start_loop);
7485 // If `cls` was poisoned above, unpoison it.
7486 __ MaybeUnpoisonHeapReference(cls.AsRegister<Register>());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007487 break;
7488 }
Vladimir Marko175e7862018-03-27 09:03:13 +00007489
7490 case TypeCheckKind::kBitstringCheck: {
7491 // /* HeapReference<Class> */ temp = obj->klass_
7492 GenerateReferenceLoadTwoRegisters(instruction,
7493 temp_loc,
7494 obj_loc,
7495 class_offset,
7496 kWithoutReadBarrier);
7497
7498 GenerateBitstringTypeCheckCompare(instruction, temp);
7499 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
7500 break;
7501 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007502 }
7503 __ Bind(&done);
7504
Roland Levillain0d5a2812015-11-13 10:07:31 +00007505 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007506}
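
// A simplified sketch (register names and labels are illustrative) of the fast path emitted for
// the kInterfaceCheck case above; it scans the klass' IfTable two slots at a time, since each
// entry is an (interface class, method array) pair:
//
//   movl temp,  [obj + class_offset]            // temp = obj->klass_
//   movl temp,  [temp + iftable_offset]         // temp = klass->iftable_
//   movl temp2, [temp + array_length_offset]    // number of IfTable slots
// next_interface:
//   subl temp2, 2                               // step back one (interface, methods) pair
//   js   type_check_slow_path                   // exhausted -> throw ClassCastException
//   cmpl cls,   [temp + temp2*4 + data_offset]  // compare against the interface class
//   jne  next_interface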
7507
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007508void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007509 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
7510 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007511 InvokeRuntimeCallingConvention calling_convention;
7512 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7513}
7514
7515void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01007516 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject
7517 : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01007518 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01007519 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00007520 if (instruction->IsEnter()) {
7521 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7522 } else {
7523 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7524 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007525}
7526
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05307527void LocationsBuilderX86::VisitX86AndNot(HX86AndNot* instruction) {
7528 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7529 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
7530 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7531 locations->SetInAt(0, Location::RequiresRegister());
7532 locations->SetInAt(1, Location::RequiresRegister());
7533 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7534}
7535
7536void InstructionCodeGeneratorX86::VisitX86AndNot(HX86AndNot* instruction) {
7537 LocationSummary* locations = instruction->GetLocations();
7538 Location first = locations->InAt(0);
7539 Location second = locations->InAt(1);
7540 Location dest = locations->Out();
7541 if (instruction->GetResultType() == DataType::Type::kInt32) {
7542 __ andn(dest.AsRegister<Register>(),
7543 first.AsRegister<Register>(),
7544 second.AsRegister<Register>());
7545 } else {
7546 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
7547 __ andn(dest.AsRegisterPairLow<Register>(),
7548 first.AsRegisterPairLow<Register>(),
7549 second.AsRegisterPairLow<Register>());
7550 __ andn(dest.AsRegisterPairHigh<Register>(),
7551 first.AsRegisterPairHigh<Register>(),
7552 second.AsRegisterPairHigh<Register>());
7553 }
7554}
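
// The BMI1 `andn` instruction used above computes dest = ~src1 & src2, so the code produces
// (~first) & second for both the int and long cases; a minimal C-style sketch of the 64-bit
// case, with hypothetical variable names:
//
//   uint32_t lo = ~first_lo & second_lo;
//   uint32_t hi = ~first_hi & second_hi;   // the two halves are independent, so no carries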
7555
7556void LocationsBuilderX86::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
7557 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7558 DCHECK(instruction->GetType() == DataType::Type::kInt32) << instruction->GetType();
7559 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7560 locations->SetInAt(0, Location::RequiresRegister());
7561 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7562}
7563
7564void InstructionCodeGeneratorX86::VisitX86MaskOrResetLeastSetBit(
7565 HX86MaskOrResetLeastSetBit* instruction) {
7566 LocationSummary* locations = instruction->GetLocations();
7567 Location src = locations->InAt(0);
7568 Location dest = locations->Out();
7569 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
7570 switch (instruction->GetOpKind()) {
7571 case HInstruction::kAnd:
7572 __ blsr(dest.AsRegister<Register>(), src.AsRegister<Register>());
7573 break;
7574 case HInstruction::kXor:
7575 __ blsmsk(dest.AsRegister<Register>(), src.AsRegister<Register>());
7576 break;
7577 default:
7578 LOG(FATAL) << "Unreachable";
7579 }
7580}
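
// For reference, the BMI1 instructions selected above have simple arithmetic equivalents
// (a sketch, not part of the generated code):
//
//   blsr(x)   == x & (x - 1)   // kAnd: reset the lowest set bit
//   blsmsk(x) == x ^ (x - 1)   // kXor: mask up to and including the lowest set bit
//
// e.g. for x = 0b0110'1000: blsr -> 0b0110'0000, blsmsk -> 0b0000'1111.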
7581
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007582void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
7583void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
7584void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
7585
7586void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
7587 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007588 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007589 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
7590 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007591 locations->SetInAt(0, Location::RequiresRegister());
7592 locations->SetInAt(1, Location::Any());
7593 locations->SetOut(Location::SameAsFirstInput());
7594}
7595
7596void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
7597 HandleBitwiseOperation(instruction);
7598}
7599
7600void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
7601 HandleBitwiseOperation(instruction);
7602}
7603
7604void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
7605 HandleBitwiseOperation(instruction);
7606}
7607
7608void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
7609 LocationSummary* locations = instruction->GetLocations();
7610 Location first = locations->InAt(0);
7611 Location second = locations->InAt(1);
7612 DCHECK(first.Equals(locations->Out()));
7613
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007614 if (instruction->GetResultType() == DataType::Type::kInt32) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007615 if (second.IsRegister()) {
7616 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007617 __ andl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007618 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007619 __ orl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007620 } else {
7621 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00007622 __ xorl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007623 }
7624 } else if (second.IsConstant()) {
7625 if (instruction->IsAnd()) {
Roland Levillain199f3362014-11-27 17:15:16 +00007626 __ andl(first.AsRegister<Register>(),
7627 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007628 } else if (instruction->IsOr()) {
Roland Levillain199f3362014-11-27 17:15:16 +00007629 __ orl(first.AsRegister<Register>(),
7630 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007631 } else {
7632 DCHECK(instruction->IsXor());
Roland Levillain199f3362014-11-27 17:15:16 +00007633 __ xorl(first.AsRegister<Register>(),
7634 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007635 }
7636 } else {
7637 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007638 __ andl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007639 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007640 __ orl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007641 } else {
7642 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00007643 __ xorl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007644 }
7645 }
7646 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007647 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007648 if (second.IsRegisterPair()) {
7649 if (instruction->IsAnd()) {
7650 __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7651 __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7652 } else if (instruction->IsOr()) {
7653 __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7654 __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7655 } else {
7656 DCHECK(instruction->IsXor());
7657 __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7658 __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7659 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007660 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007661 if (instruction->IsAnd()) {
7662 __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7663 __ andl(first.AsRegisterPairHigh<Register>(),
7664 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7665 } else if (instruction->IsOr()) {
7666 __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7667 __ orl(first.AsRegisterPairHigh<Register>(),
7668 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7669 } else {
7670 DCHECK(instruction->IsXor());
7671 __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7672 __ xorl(first.AsRegisterPairHigh<Register>(),
7673 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7674 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007675 } else {
7676 DCHECK(second.IsConstant()) << second;
7677 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007678 int32_t low_value = Low32Bits(value);
7679 int32_t high_value = High32Bits(value);
7680 Immediate low(low_value);
7681 Immediate high(high_value);
7682 Register first_low = first.AsRegisterPairLow<Register>();
7683 Register first_high = first.AsRegisterPairHigh<Register>();
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007684 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007685 if (low_value == 0) {
7686 __ xorl(first_low, first_low);
7687 } else if (low_value != -1) {
7688 __ andl(first_low, low);
7689 }
7690 if (high_value == 0) {
7691 __ xorl(first_high, first_high);
7692 } else if (high_value != -1) {
7693 __ andl(first_high, high);
7694 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007695 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007696 if (low_value != 0) {
7697 __ orl(first_low, low);
7698 }
7699 if (high_value != 0) {
7700 __ orl(first_high, high);
7701 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007702 } else {
7703 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007704 if (low_value != 0) {
7705 __ xorl(first_low, low);
7706 }
7707 if (high_value != 0) {
7708 __ xorl(first_high, high);
7709 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007710 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007711 }
7712 }
7713}
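
// To make the 64-bit constant special-casing above concrete, a hedged example (this reflects the
// instruction selection in the code above today, not a guarantee):
//
//   long_val & 0xFFFFFFFF00000000L   // low == 0, high == -1
//     -> xorl first_low, first_low   // clear the low word
//        (no instruction emitted for the high word)
//   long_val | 0L                    // low == 0, high == 0
//     -> no instructions emitted at all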
7714
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007715void InstructionCodeGeneratorX86::GenerateReferenceLoadOneRegister(
7716 HInstruction* instruction,
7717 Location out,
7718 uint32_t offset,
7719 Location maybe_temp,
7720 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007721 Register out_reg = out.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007722 if (read_barrier_option == kWithReadBarrier) {
7723 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007724 if (kUseBakerReadBarrier) {
7725 // Load with fast path based Baker's read barrier.
7726 // /* HeapReference<Object> */ out = *(out + offset)
7727 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007728 instruction, out, out_reg, offset, /* needs_null_check= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007729 } else {
7730 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007731 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain7c1559a2015-12-15 10:55:36 +00007732 // in the following move operation, as we will need it for the
7733 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00007734 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007735 __ movl(maybe_temp.AsRegister<Register>(), out_reg);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007736 // /* HeapReference<Object> */ out = *(out + offset)
7737 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007738 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007739 }
7740 } else {
7741 // Plain load with no read barrier.
7742 // /* HeapReference<Object> */ out = *(out + offset)
7743 __ movl(out_reg, Address(out_reg, offset));
7744 __ MaybeUnpoisonHeapReference(out_reg);
7745 }
7746}
7747
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007748void InstructionCodeGeneratorX86::GenerateReferenceLoadTwoRegisters(
7749 HInstruction* instruction,
7750 Location out,
7751 Location obj,
7752 uint32_t offset,
7753 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007754 Register out_reg = out.AsRegister<Register>();
7755 Register obj_reg = obj.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007756 if (read_barrier_option == kWithReadBarrier) {
7757 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007758 if (kUseBakerReadBarrier) {
7759 // Load with fast path based Baker's read barrier.
7760 // /* HeapReference<Object> */ out = *(obj + offset)
7761 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007762 instruction, out, obj_reg, offset, /* needs_null_check= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007763 } else {
7764 // Load with slow path based read barrier.
7765 // /* HeapReference<Object> */ out = *(obj + offset)
7766 __ movl(out_reg, Address(obj_reg, offset));
7767 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
7768 }
7769 } else {
7770 // Plain load with no read barrier.
7771 // /* HeapReference<Object> */ out = *(obj + offset)
7772 __ movl(out_reg, Address(obj_reg, offset));
7773 __ MaybeUnpoisonHeapReference(out_reg);
7774 }
7775}
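
// Note on the two helpers above: the one-register form reloads through the destination
// (out = *(out + offset)), clobbering the original reference, while the two-register form reads
// through a separate base (out = *(obj + offset)) and leaves `obj` intact. Both emit the same
// Baker fast path when read barriers are enabled.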
7776
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007777void InstructionCodeGeneratorX86::GenerateGcRootFieldLoad(
7778 HInstruction* instruction,
7779 Location root,
7780 const Address& address,
7781 Label* fixup_label,
7782 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007783 Register root_reg = root.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007784 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007785 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007786 if (kUseBakerReadBarrier) {
7787 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
7788 // Baker's read barriers are used:
7789 //
Roland Levillaind966ce72017-02-09 16:20:14 +00007790 // root = obj.field;
7791 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
7792 // if (temp != null) {
7793 // root = temp(root)
Roland Levillain7c1559a2015-12-15 10:55:36 +00007794 // }
7795
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007796 // /* GcRoot<mirror::Object> */ root = *address
7797 __ movl(root_reg, address);
7798 if (fixup_label != nullptr) {
7799 __ Bind(fixup_label);
7800 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007801 static_assert(
7802 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
7803 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
7804 "have different sizes.");
7805 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
7806 "art::mirror::CompressedReference<mirror::Object> and int32_t "
7807 "have different sizes.");
7808
Vladimir Marko953437b2016-08-24 08:30:46 +00007809 // Slow path marking the GC root `root`.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007810 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007811 instruction, root, /* unpoison_ref_before_marking= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007812 codegen_->AddSlowPath(slow_path);
7813
Roland Levillaind966ce72017-02-09 16:20:14 +00007814 // Test the entrypoint (`Thread::Current()->pReadBarrierMarkReg ## root.reg()`).
7815 const int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +01007816 Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(root.reg());
Roland Levillaind966ce72017-02-09 16:20:14 +00007817 __ fs()->cmpl(Address::Absolute(entry_point_offset), Immediate(0));
7818 // The entrypoint is null when the GC is not marking.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007819 __ j(kNotEqual, slow_path->GetEntryLabel());
7820 __ Bind(slow_path->GetExitLabel());
7821 } else {
7822 // GC root loaded through a slow path for read barriers other
7823 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007824 // /* GcRoot<mirror::Object>* */ root = address
7825 __ leal(root_reg, address);
7826 if (fixup_label != nullptr) {
7827 __ Bind(fixup_label);
7828 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007829 // /* mirror::Object* */ root = root->Read()
7830 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
7831 }
7832 } else {
7833 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007834 // /* GcRoot<mirror::Object> */ root = *address
7835 __ movl(root_reg, address);
7836 if (fixup_label != nullptr) {
7837 __ Bind(fixup_label);
7838 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007839 // Note that GC roots are not affected by heap poisoning, thus we
7840 // do not have to unpoison `root_reg` here.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007841 }
7842}
7843
7844void CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7845 Location ref,
7846 Register obj,
7847 uint32_t offset,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007848 bool needs_null_check) {
7849 DCHECK(kEmitCompilerReadBarrier);
7850 DCHECK(kUseBakerReadBarrier);
7851
7852 // /* HeapReference<Object> */ ref = *(obj + offset)
7853 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007854 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007855}
7856
7857void CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7858 Location ref,
7859 Register obj,
7860 uint32_t data_offset,
7861 Location index,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007862 bool needs_null_check) {
7863 DCHECK(kEmitCompilerReadBarrier);
7864 DCHECK(kUseBakerReadBarrier);
7865
Roland Levillain3d312422016-06-23 13:53:42 +01007866 static_assert(
7867 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7868 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00007869 // /* HeapReference<Object> */ ref =
7870 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007871 Address src = CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007872 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007873}
7874
7875void CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7876 Location ref,
7877 Register obj,
7878 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007879 bool needs_null_check,
7880 bool always_update_field,
7881 Register* temp) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007882 DCHECK(kEmitCompilerReadBarrier);
7883 DCHECK(kUseBakerReadBarrier);
7884
7885 // In slow path based read barriers, the read barrier call is
7886 // inserted after the original load. However, in fast path based
7887 // Baker's read barriers, we need to perform the load of
7888 // mirror::Object::monitor_ *before* the original reference load.
7889 // This load-load ordering is required by the read barrier.
7890 // The fast path/slow path (for Baker's algorithm) should look like:
7891 //
7892 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7893 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7894 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007895 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007896 // if (is_gray) {
7897 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7898 // }
7899 //
7900 // Note: the original implementation in ReadBarrier::Barrier is
7901 // slightly more complex as:
7902 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007903 // the high-bits of rb_state, which are expected to be all zeroes
7904 // (we use CodeGeneratorX86::GenerateMemoryBarrier instead here,
7905 // which is a no-op thanks to the x86 memory model);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007906 // - it performs additional checks that we do not do here for
7907 // performance reasons.
7908
7909 Register ref_reg = ref.AsRegister<Register>();
Roland Levillain7c1559a2015-12-15 10:55:36 +00007910 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7911
Vladimir Marko953437b2016-08-24 08:30:46 +00007912 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01007913 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007914 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007915 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7916 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7917 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
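  // For illustration only (assuming LockWord::kReadBarrierStateShift == 28,
  // the 32-bit lock word layout at the time of writing), the constants above
  // work out to:
  //   gray_byte_position = 28 / kBitsPerByte = 3   (the most significant byte)
  //   gray_bit_position  = 28 % kBitsPerByte = 4
  //   test_value         = 1 << 4 = 0x10
  // so the gray check below reduces to a single `testb $0x10, monitor[3]`.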
7918
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007919 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007920 // ref = ReadBarrier::Mark(ref);
7921 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7922 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain7c1559a2015-12-15 10:55:36 +00007923 if (needs_null_check) {
7924 MaybeRecordImplicitNullCheck(instruction);
7925 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007926
7927 // Load fence to prevent load-load reordering.
7928 // Note that this is a no-op, thanks to the x86 memory model.
7929 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7930
7931 // The actual reference load.
7932 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007933 __ movl(ref_reg, src); // Flags are unaffected.
7934
 7935  // Note: Reference unpoisoning modifies the flags, so we need to delay it until after the branch.
7936 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007937 SlowPathCode* slow_path;
7938 if (always_update_field) {
7939 DCHECK(temp != nullptr);
Vladimir Marko174b2e22017-10-12 13:34:49 +01007940 slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007941 instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007942 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01007943 slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007944 instruction, ref, /* unpoison_ref_before_marking= */ true);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007945 }
Vladimir Marko953437b2016-08-24 08:30:46 +00007946 AddSlowPath(slow_path);
7947
7948 // We have done the "if" of the gray bit check above, now branch based on the flags.
7949 __ j(kNotZero, slow_path->GetEntryLabel());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007950
7951 // Object* ref = ref_addr->AsMirrorPtr()
7952 __ MaybeUnpoisonHeapReference(ref_reg);
7953
Roland Levillain7c1559a2015-12-15 10:55:36 +00007954 __ Bind(slow_path->GetExitLabel());
7955}
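// A sketch of the sequence emitted by GenerateReferenceLoadWithBakerReadBarrier
// above (illustrative only; register names stand for whatever was allocated):
//
//     testb $test_value, monitor_offset+gray_byte_position(%obj)  // gray check
//     // (an implicit null check may be recorded on the testb)
//     // load-load barrier: no code needed, the x86 memory model orders loads
//     movl  src, %ref_reg          // original reference load, flags preserved
//     jnz   read_barrier_mark_slow_path
//     // MaybeUnpoisonHeapReference(%ref_reg) when heap poisoning is enabled
//   exit: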
7956
7957void CodeGeneratorX86::GenerateReadBarrierSlow(HInstruction* instruction,
7958 Location out,
7959 Location ref,
7960 Location obj,
7961 uint32_t offset,
7962 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007963 DCHECK(kEmitCompilerReadBarrier);
7964
Roland Levillain7c1559a2015-12-15 10:55:36 +00007965 // Insert a slow path based read barrier *after* the reference load.
7966 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007967 // If heap poisoning is enabled, the unpoisoning of the loaded
7968 // reference will be carried out by the runtime within the slow
7969 // path.
7970 //
7971 // Note that `ref` currently does not get unpoisoned (when heap
7972 // poisoning is enabled), which is alright as the `ref` argument is
7973 // not used by the artReadBarrierSlow entry point.
7974 //
7975 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007976 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00007977 ReadBarrierForHeapReferenceSlowPathX86(instruction, out, ref, obj, offset, index);
7978 AddSlowPath(slow_path);
7979
Roland Levillain0d5a2812015-11-13 10:07:31 +00007980 __ jmp(slow_path->GetEntryLabel());
7981 __ Bind(slow_path->GetExitLabel());
7982}
7983
Roland Levillain7c1559a2015-12-15 10:55:36 +00007984void CodeGeneratorX86::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7985 Location out,
7986 Location ref,
7987 Location obj,
7988 uint32_t offset,
7989 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007990 if (kEmitCompilerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007991 // Baker's read barriers shall be handled by the fast path
7992 // (CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier).
7993 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007994 // If heap poisoning is enabled, unpoisoning will be taken care of
7995 // by the runtime within the slow path.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007996 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007997 } else if (kPoisonHeapReferences) {
7998 __ UnpoisonHeapReference(out.AsRegister<Register>());
7999 }
8000}
8001
Roland Levillain7c1559a2015-12-15 10:55:36 +00008002void CodeGeneratorX86::GenerateReadBarrierForRootSlow(HInstruction* instruction,
8003 Location out,
8004 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00008005 DCHECK(kEmitCompilerReadBarrier);
8006
Roland Levillain7c1559a2015-12-15 10:55:36 +00008007 // Insert a slow path based read barrier *after* the GC root load.
8008 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00008009 // Note that GC roots are not affected by heap poisoning, so we do
8010 // not need to do anything special for this here.
8011 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01008012 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00008013 AddSlowPath(slow_path);
8014
Roland Levillain0d5a2812015-11-13 10:07:31 +00008015 __ jmp(slow_path->GetEntryLabel());
8016 __ Bind(slow_path->GetExitLabel());
8017}
8018
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01008019void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00008020 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00008021 LOG(FATAL) << "Unreachable";
8022}
8023
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01008024void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00008025 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00008026 LOG(FATAL) << "Unreachable";
8027}
8028
Mark Mendellfe57faa2015-09-18 09:26:15 -04008029// Simple implementation of packed switch - generate cascaded compare/jumps.
8030void LocationsBuilderX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8031 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008032 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04008033 locations->SetInAt(0, Location::RequiresRegister());
8034}
8035
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008036void InstructionCodeGeneratorX86::GenPackedSwitchWithCompares(Register value_reg,
8037 int32_t lower_bound,
8038 uint32_t num_entries,
8039 HBasicBlock* switch_block,
8040 HBasicBlock* default_block) {
8041 // Figure out the correct compare values and jump conditions.
8042 // Handle the first compare/branch as a special case because it might
8043 // jump to the default case.
8044 DCHECK_GT(num_entries, 2u);
8045 Condition first_condition;
8046 uint32_t index;
8047 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
8048 if (lower_bound != 0) {
8049 first_condition = kLess;
8050 __ cmpl(value_reg, Immediate(lower_bound));
8051 __ j(first_condition, codegen_->GetLabelOf(default_block));
8052 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04008053
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008054 index = 1;
8055 } else {
8056 // Handle all the compare/jumps below.
8057 first_condition = kBelow;
8058 index = 0;
8059 }
8060
8061 // Handle the rest of the compare/jumps.
8062 for (; index + 1 < num_entries; index += 2) {
8063 int32_t compare_to_value = lower_bound + index + 1;
8064 __ cmpl(value_reg, Immediate(compare_to_value));
 8065    // Jump to successors[index] if value < case_value[index] + 1 (i.e. == case_value[index]).
8066 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
8067 // Jump to successors[index + 1] if value == case_value[index + 1].
8068 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
8069 }
8070
8071 if (index != num_entries) {
 8072    // There is an odd number of entries.  Handle the last one.
8073 DCHECK_EQ(index + 1, num_entries);
8074 __ cmpl(value_reg, Immediate(lower_bound + index));
8075 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04008076 }
8077
8078 // And the default for any other value.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008079 if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
8080 __ jmp(codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04008081 }
8082}
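// As a concrete (hypothetical) example, a packed switch with lower_bound == 10
// and num_entries == 4 produces the cascade:
//
//     cmpl $10, value ; jl default ; je case[0]    // value == 10
//     cmpl $12, value ; jl case[1] ; je case[2]    // value == 11 or value == 12
//     cmpl $13, value ; je case[3]                 // value == 13
//     jmp default                                  // unless default falls through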
8083
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008084void InstructionCodeGeneratorX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8085 int32_t lower_bound = switch_instr->GetStartValue();
8086 uint32_t num_entries = switch_instr->GetNumEntries();
8087 LocationSummary* locations = switch_instr->GetLocations();
8088 Register value_reg = locations->InAt(0).AsRegister<Register>();
8089
8090 GenPackedSwitchWithCompares(value_reg,
8091 lower_bound,
8092 num_entries,
8093 switch_instr->GetBlock(),
8094 switch_instr->GetDefaultBlock());
8095}
8096
Mark Mendell805b3b52015-09-18 14:10:29 -04008097void LocationsBuilderX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
8098 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008099 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendell805b3b52015-09-18 14:10:29 -04008100 locations->SetInAt(0, Location::RequiresRegister());
8101
8102 // Constant area pointer.
8103 locations->SetInAt(1, Location::RequiresRegister());
8104
8105 // And the temporary we need.
8106 locations->AddTemp(Location::RequiresRegister());
8107}
8108
8109void InstructionCodeGeneratorX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
8110 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008111 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendell805b3b52015-09-18 14:10:29 -04008112 LocationSummary* locations = switch_instr->GetLocations();
8113 Register value_reg = locations->InAt(0).AsRegister<Register>();
8114 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
8115
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008116 if (num_entries <= kPackedSwitchJumpTableThreshold) {
8117 GenPackedSwitchWithCompares(value_reg,
8118 lower_bound,
8119 num_entries,
8120 switch_instr->GetBlock(),
8121 default_block);
8122 return;
8123 }
8124
Mark Mendell805b3b52015-09-18 14:10:29 -04008125  // Too many entries for compares; use the jump table in the constant area.
8126 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
8127 Register constant_area = locations->InAt(1).AsRegister<Register>();
8128
8129 // Remove the bias, if needed.
8130 if (lower_bound != 0) {
8131 __ leal(temp_reg, Address(value_reg, -lower_bound));
8132 value_reg = temp_reg;
8133 }
8134
8135 // Is the value in range?
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008136 DCHECK_GE(num_entries, 1u);
Mark Mendell805b3b52015-09-18 14:10:29 -04008137 __ cmpl(value_reg, Immediate(num_entries - 1));
8138 __ j(kAbove, codegen_->GetLabelOf(default_block));
8139
8140 // We are in the range of the table.
8141 // Load (target-constant_area) from the jump table, indexing by the value.
8142 __ movl(temp_reg, codegen_->LiteralCaseTable(switch_instr, constant_area, value_reg));
8143
8144 // Compute the actual target address by adding in constant_area.
8145 __ addl(temp_reg, constant_area);
8146
8147 // And jump.
8148 __ jmp(temp_reg);
8149}
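// The jump table path above boils down to (illustrative):
//
//     leal  -lower_bound(value), value      // rebias, only if lower_bound != 0
//     cmpl  $(num_entries - 1), value
//     ja    default
//     movl  table(constant_area, value, 4), temp   // entry = target - base
//     addl  constant_area, temp                    // absolute target address
//     jmp   *temp
//
// where `constant_area` holds the address materialized by
// HX86ComputeBaseMethodAddress and `table` is the offset of the jump table
// inside the constant area, patched late by JumpTableRIPFixup below.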
8150
Mark Mendell0616ae02015-04-17 12:49:27 -04008151void LocationsBuilderX86::VisitX86ComputeBaseMethodAddress(
8152 HX86ComputeBaseMethodAddress* insn) {
8153 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008154 new (GetGraph()->GetAllocator()) LocationSummary(insn, LocationSummary::kNoCall);
Mark Mendell0616ae02015-04-17 12:49:27 -04008155 locations->SetOut(Location::RequiresRegister());
8156}
8157
8158void InstructionCodeGeneratorX86::VisitX86ComputeBaseMethodAddress(
8159 HX86ComputeBaseMethodAddress* insn) {
8160 LocationSummary* locations = insn->GetLocations();
8161 Register reg = locations->Out().AsRegister<Register>();
8162
8163 // Generate call to next instruction.
8164 Label next_instruction;
8165 __ call(&next_instruction);
8166 __ Bind(&next_instruction);
8167
8168 // Remember this offset for later use with constant area.
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008169 codegen_->AddMethodAddressOffset(insn, GetAssembler()->CodeSize());
Mark Mendell0616ae02015-04-17 12:49:27 -04008170
8171 // Grab the return address off the stack.
8172 __ popl(reg);
8173}
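// The call/pop pair above is the usual 32-bit idiom for reading EIP, which has
// no direct mov form. Sketch:
//
//     call next        // pushes the address of `next` as the return address
//   next:
//     popl  %reg       // reg now holds the absolute address of `next`
//
// The recorded CodeSize() is the method-relative offset of `next`, so later
// fixups can turn "offset within the method" into "displacement from %reg"
// (see RIPFixup::Process below).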
8174
8175void LocationsBuilderX86::VisitX86LoadFromConstantTable(
8176 HX86LoadFromConstantTable* insn) {
8177 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008178 new (GetGraph()->GetAllocator()) LocationSummary(insn, LocationSummary::kNoCall);
Mark Mendell0616ae02015-04-17 12:49:27 -04008179
8180 locations->SetInAt(0, Location::RequiresRegister());
8181 locations->SetInAt(1, Location::ConstantLocation(insn->GetConstant()));
8182
 8183  // If the constant is not materialized (it is emitted at its use site), only the inputs need to be set.
David Brazdilb3e773e2016-01-26 11:28:37 +00008184 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04008185 return;
8186 }
8187
8188 switch (insn->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008189 case DataType::Type::kFloat32:
8190 case DataType::Type::kFloat64:
Mark Mendell0616ae02015-04-17 12:49:27 -04008191 locations->SetOut(Location::RequiresFpuRegister());
8192 break;
8193
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008194 case DataType::Type::kInt32:
Mark Mendell0616ae02015-04-17 12:49:27 -04008195 locations->SetOut(Location::RequiresRegister());
8196 break;
8197
8198 default:
8199 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
8200 }
8201}
8202
8203void InstructionCodeGeneratorX86::VisitX86LoadFromConstantTable(HX86LoadFromConstantTable* insn) {
David Brazdilb3e773e2016-01-26 11:28:37 +00008204 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04008205 return;
8206 }
8207
8208 LocationSummary* locations = insn->GetLocations();
8209 Location out = locations->Out();
8210 Register const_area = locations->InAt(0).AsRegister<Register>();
8211 HConstant *value = insn->GetConstant();
8212
8213 switch (insn->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008214 case DataType::Type::kFloat32:
Mark Mendell0616ae02015-04-17 12:49:27 -04008215 __ movss(out.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008216 codegen_->LiteralFloatAddress(
8217 value->AsFloatConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04008218 break;
8219
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008220 case DataType::Type::kFloat64:
Mark Mendell0616ae02015-04-17 12:49:27 -04008221 __ movsd(out.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008222 codegen_->LiteralDoubleAddress(
8223 value->AsDoubleConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04008224 break;
8225
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008226 case DataType::Type::kInt32:
Mark Mendell0616ae02015-04-17 12:49:27 -04008227 __ movl(out.AsRegister<Register>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008228 codegen_->LiteralInt32Address(
8229 value->AsIntConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04008230 break;
8231
8232 default:
8233 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
8234 }
8235}
8236
Mark Mendell0616ae02015-04-17 12:49:27 -04008237/**
 8238 * Class to handle late fixup of offsets into the constant area.
8239 */
Vladimir Marko5233f932015-09-29 19:01:15 +01008240class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
Mark Mendell0616ae02015-04-17 12:49:27 -04008241 public:
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008242 RIPFixup(CodeGeneratorX86& codegen,
8243 HX86ComputeBaseMethodAddress* base_method_address,
8244 size_t offset)
8245 : codegen_(&codegen),
8246 base_method_address_(base_method_address),
8247 offset_into_constant_area_(offset) {}
Mark Mendell805b3b52015-09-18 14:10:29 -04008248
8249 protected:
8250 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
8251
8252 CodeGeneratorX86* codegen_;
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008253 HX86ComputeBaseMethodAddress* base_method_address_;
Mark Mendell0616ae02015-04-17 12:49:27 -04008254
8255 private:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01008256 void Process(const MemoryRegion& region, int pos) override {
Mark Mendell0616ae02015-04-17 12:49:27 -04008257 // Patch the correct offset for the instruction. The place to patch is the
8258 // last 4 bytes of the instruction.
8259 // The value to patch is the distance from the offset in the constant area
8260 // from the address computed by the HX86ComputeBaseMethodAddress instruction.
Mark Mendell805b3b52015-09-18 14:10:29 -04008261 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008262 int32_t relative_position =
8263 constant_offset - codegen_->GetMethodAddressOffset(base_method_address_);
Mark Mendell0616ae02015-04-17 12:49:27 -04008264
8265 // Patch in the right value.
8266 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
8267 }
8268
Mark Mendell0616ae02015-04-17 12:49:27 -04008269 // Location in constant area that the fixup refers to.
Mark Mendell805b3b52015-09-18 14:10:29 -04008270 int32_t offset_into_constant_area_;
Mark Mendell0616ae02015-04-17 12:49:27 -04008271};
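// To make the patch concrete (hypothetical numbers): if the constant area
// starts at code offset 0x400, this entry lives at +0x10 inside it, and the
// HX86ComputeBaseMethodAddress instruction sits at code offset 0x50, then
//   relative_position = (0x400 + 0x10) - 0x50 = 0x3c0,
// and since the base register holds the absolute address of offset 0x50, the
// patched displacement makes `base + 0x3c0` address the constant directly.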
8272
Mark Mendell805b3b52015-09-18 14:10:29 -04008273/**
8274 * Class to handle late fixup of offsets to a jump table that will be created in the
8275 * constant area.
8276 */
8277class JumpTableRIPFixup : public RIPFixup {
8278 public:
8279 JumpTableRIPFixup(CodeGeneratorX86& codegen, HX86PackedSwitch* switch_instr)
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008280 : RIPFixup(codegen, switch_instr->GetBaseMethodAddress(), static_cast<size_t>(-1)),
8281 switch_instr_(switch_instr) {}
Mark Mendell805b3b52015-09-18 14:10:29 -04008282
8283 void CreateJumpTable() {
8284 X86Assembler* assembler = codegen_->GetAssembler();
8285
8286 // Ensure that the reference to the jump table has the correct offset.
8287 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
8288 SetOffset(offset_in_constant_table);
8289
8290 // The label values in the jump table are computed relative to the
8291 // instruction addressing the constant area.
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008292 const int32_t relative_offset = codegen_->GetMethodAddressOffset(base_method_address_);
Mark Mendell805b3b52015-09-18 14:10:29 -04008293
 8294    // Populate the jump table with the correct target offsets.
8295 int32_t num_entries = switch_instr_->GetNumEntries();
8296 HBasicBlock* block = switch_instr_->GetBlock();
8297 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
 8298    // The value we want is the target offset - the position of the base address instruction.
8299 for (int32_t i = 0; i < num_entries; i++) {
8300 HBasicBlock* b = successors[i];
8301 Label* l = codegen_->GetLabelOf(b);
8302 DCHECK(l->IsBound());
8303 int32_t offset_to_block = l->Position() - relative_offset;
8304 assembler->AppendInt32(offset_to_block);
8305 }
8306 }
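  // Worked (hypothetical) example: with the base-address instruction at code
  // offset 0x50 and a case target bound at offset 0x2a0, the entry appended
  // above is 0x2a0 - 0x50 = 0x250. VisitX86PackedSwitch adds the base register
  // (the absolute address of offset 0x50) back in before the indirect jump,
  // recovering the absolute branch target.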
8307
8308 private:
8309 const HX86PackedSwitch* switch_instr_;
8310};
8311
8312void CodeGeneratorX86::Finalize(CodeAllocator* allocator) {
8313 // Generate the constant area if needed.
8314 X86Assembler* assembler = GetAssembler();
jaishank20d1c942019-03-08 15:08:17 +05308315
Mark Mendell805b3b52015-09-18 14:10:29 -04008316 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
8317 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8
8318 // byte values.
8319 assembler->Align(4, 0);
8320 constant_area_start_ = assembler->CodeSize();
8321
8322 // Populate any jump tables.
Vladimir Marko7d157fc2017-05-10 16:29:23 +01008323 for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
Mark Mendell805b3b52015-09-18 14:10:29 -04008324 jump_table->CreateJumpTable();
8325 }
8326
8327 // And now add the constant area to the generated code.
8328 assembler->AddConstantArea();
8329 }
8330
8331 // And finish up.
8332 CodeGenerator::Finalize(allocator);
8333}
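// After Finalize the generated method is laid out roughly as (illustrative):
//
//   [ instructions ... ][ pad to 4 ][ fp/int literals ... ][ jump tables ... ]
//                        ^ constant_area_start_
//
// The RIPFixups registered while emitting code are resolved against
// constant_area_start_ when the assembler processes its fixups, which is why
// the constant area can only be emitted once the final code size is known.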
8334
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008335Address CodeGeneratorX86::LiteralDoubleAddress(double v,
8336 HX86ComputeBaseMethodAddress* method_base,
8337 Register reg) {
8338 AssemblerFixup* fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008339 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddDouble(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04008340 return Address(reg, kDummy32BitOffset, fixup);
8341}
8342
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008343Address CodeGeneratorX86::LiteralFloatAddress(float v,
8344 HX86ComputeBaseMethodAddress* method_base,
8345 Register reg) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008346 AssemblerFixup* fixup =
8347 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddFloat(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04008348 return Address(reg, kDummy32BitOffset, fixup);
8349}
8350
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008351Address CodeGeneratorX86::LiteralInt32Address(int32_t v,
8352 HX86ComputeBaseMethodAddress* method_base,
8353 Register reg) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008354 AssemblerFixup* fixup =
8355 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddInt32(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04008356 return Address(reg, kDummy32BitOffset, fixup);
8357}
8358
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008359Address CodeGeneratorX86::LiteralInt64Address(int64_t v,
8360 HX86ComputeBaseMethodAddress* method_base,
8361 Register reg) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008362 AssemblerFixup* fixup =
8363 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddInt64(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04008364 return Address(reg, kDummy32BitOffset, fixup);
8365}
8366
Aart Bika19616e2016-02-01 18:57:58 -08008367void CodeGeneratorX86::Load32BitValue(Register dest, int32_t value) {
8368 if (value == 0) {
8369 __ xorl(dest, dest);
8370 } else {
8371 __ movl(dest, Immediate(value));
8372 }
8373}
8374
8375void CodeGeneratorX86::Compare32BitValue(Register dest, int32_t value) {
8376 if (value == 0) {
8377 __ testl(dest, dest);
8378 } else {
8379 __ cmpl(dest, Immediate(value));
8380 }
8381}
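// Both zero special cases above pick the shorter, immediate-free encodings:
// `xorl %eax, %eax` is 2 bytes versus 5 for `movl $0, %eax` (and is recognized
// as a zeroing idiom), and `testl %eax, %eax` is 2 bytes versus at least 3 for
// `cmpl $0, %eax`, while setting the flags needed by later branches the same
// way.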
8382
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008383void CodeGeneratorX86::GenerateIntCompare(Location lhs, Location rhs) {
8384 Register lhs_reg = lhs.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07008385 GenerateIntCompare(lhs_reg, rhs);
8386}
8387
8388void CodeGeneratorX86::GenerateIntCompare(Register lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008389 if (rhs.IsConstant()) {
8390 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07008391 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008392 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07008393 __ cmpl(lhs, Address(ESP, rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008394 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07008395 __ cmpl(lhs, rhs.AsRegister<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008396 }
8397}
8398
8399Address CodeGeneratorX86::ArrayAddress(Register obj,
8400 Location index,
8401 ScaleFactor scale,
8402 uint32_t data_offset) {
8403 return index.IsConstant() ?
8404 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
8405 Address(obj, index.AsRegister<Register>(), scale, data_offset);
8406}
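// For example (hypothetical operands): an int array access with
// data_offset == 12 and TIMES_4 folds a constant index 5 into
// Address(obj, 5 * 4 + 12), i.e. 32(%obj), while a register index yields the
// scaled form 12(%obj, %index, 4). The `<< scale` above relies on ScaleFactor
// enum values being the shift amounts (TIMES_4 == 2).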
8407
Mark Mendell805b3b52015-09-18 14:10:29 -04008408Address CodeGeneratorX86::LiteralCaseTable(HX86PackedSwitch* switch_instr,
8409 Register reg,
8410 Register value) {
8411 // Create a fixup to be used to create and address the jump table.
8412 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008413 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell805b3b52015-09-18 14:10:29 -04008414
8415 // We have to populate the jump tables.
8416 fixups_to_jump_tables_.push_back(table_fixup);
8417
8418 // We want a scaled address, as we are extracting the correct offset from the table.
8419 return Address(reg, value, TIMES_4, kDummy32BitOffset, table_fixup);
8420}
8421
Andreas Gampe85b62f22015-09-09 13:15:38 -07008422// TODO: target as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008423void CodeGeneratorX86::MoveFromReturnRegister(Location target, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07008424 if (!target.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008425 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07008426 return;
8427 }
8428
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008429 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07008430
8431 Location return_loc = InvokeDexCallingConventionVisitorX86().GetReturnLocation(type);
8432 if (target.Equals(return_loc)) {
8433 return;
8434 }
8435
8436 // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
8437 // with the else branch.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008438 if (type == DataType::Type::kInt64) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008439 HParallelMove parallel_move(GetGraph()->GetAllocator());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008440 parallel_move.AddMove(return_loc.ToLow(), target.ToLow(), DataType::Type::kInt32, nullptr);
8441 parallel_move.AddMove(return_loc.ToHigh(), target.ToHigh(), DataType::Type::kInt32, nullptr);
Andreas Gampe85b62f22015-09-09 13:15:38 -07008442 GetMoveResolver()->EmitNativeCode(&parallel_move);
8443 } else {
8444 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01008445 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07008446 parallel_move.AddMove(return_loc, target, type, nullptr);
8447 GetMoveResolver()->EmitNativeCode(&parallel_move);
8448 }
8449}
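// For a kInt64 result the return location is the EAX/EDX register pair, so the
// code above splits the transfer into two kInt32 moves; for a hypothetical
// target pair ECX:EBX that would be EAX -> ECX (low word) and EDX -> EBX (high
// word), with the parallel move resolver handling any overlap between sources
// and destinations.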
8450
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00008451void CodeGeneratorX86::PatchJitRootUse(uint8_t* code,
8452 const uint8_t* roots_data,
8453 const PatchInfo<Label>& info,
8454 uint64_t index_in_table) const {
8455 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
8456 uintptr_t address =
8457 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
Andreas Gampec55bb392018-09-21 00:02:02 +00008458 using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00008459 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
8460 dchecked_integral_cast<uint32_t>(address);
8461}
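// In other words (sketch): the JIT root load was emitted with a dummy 32-bit
// immediate and a label; label.Position() minus the adjustment gives the code
// offset of that immediate, which is overwritten here with the absolute
// address of entry `index_in_table` in the JIT roots table at `roots_data`.
// The unaligned store is needed because the immediate has no particular
// alignment within the instruction stream.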
8462
Nicolas Geoffray132d8362016-11-16 09:19:42 +00008463void CodeGeneratorX86::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
8464 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008465 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01008466 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01008467 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00008468 }
8469
8470 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008471 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01008472 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01008473 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00008474 }
8475}
8476
xueliang.zhonge0eb4832017-10-30 13:43:14 +00008477void LocationsBuilderX86::VisitIntermediateAddress(HIntermediateAddress* instruction
8478 ATTRIBUTE_UNUSED) {
8479 LOG(FATAL) << "Unreachable";
8480}
8481
8482void InstructionCodeGeneratorX86::VisitIntermediateAddress(HIntermediateAddress* instruction
8483 ATTRIBUTE_UNUSED) {
8484 LOG(FATAL) << "Unreachable";
8485}
8486
Shalini Salomi Bodapatib45a4352019-07-10 16:09:41 +05308487bool LocationsBuilderX86::CpuHasAvxFeatureFlag() {
8488 return codegen_->GetInstructionSetFeatures().HasAVX();
8489}
8490bool LocationsBuilderX86::CpuHasAvx2FeatureFlag() {
8491 return codegen_->GetInstructionSetFeatures().HasAVX2();
8492}
8493bool InstructionCodeGeneratorX86::CpuHasAvxFeatureFlag() {
8494 return codegen_->GetInstructionSetFeatures().HasAVX();
8495}
8496bool InstructionCodeGeneratorX86::CpuHasAvx2FeatureFlag() {
8497 return codegen_->GetInstructionSetFeatures().HasAVX2();
8498}
8499
Roland Levillain4d027112015-07-01 15:41:14 +01008500#undef __
8501
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00008502} // namespace x86
8503} // namespace art