/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86.h"

#include "art_method.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_x86.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/managed_register_x86.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = EAX;
static constexpr Register kCoreCalleeSaves[] = { EBP, ESI, EDI };

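// Descriptive note (not in the original): 0x400 is the C2 condition flag (bit 10) of the
// x87 FPU status word; presumably this mask is used when testing the status word (e.g. after
// fprem) elsewhere in this file.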
static constexpr int kC2ConditionMask = 0x400;

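// Descriptive note (not in the original): a pseudo-register id just past the eight physical
// GP registers. It is used in the CodeGeneratorX86 constructor below (`1 << kFakeReturnRegister`),
// seemingly to account for the return-address slot in the core spill mask.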
static constexpr int kFakeReturnRegister = Register(8);

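// Descriptive note (not in the original): canonical quiet NaN bit patterns for double and float.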
static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);

static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
  // that the kPrimNot result register is the same as the first argument register.
  return caller_saves;
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, x).Int32Value()

class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "NullCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};

class DivZeroCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "DivZeroCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
};

class DivRemMinusOneSlowPathX86 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86(HInstruction* instruction, Register reg, bool is_div)
      : SlowPathCode(instruction), reg_(reg), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    __ Bind(GetEntryLabel());
    if (is_div_) {
      __ negl(reg_);
    } else {
      __ movl(reg_, Immediate(0));
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86"; }

 private:
  Register reg_;
  bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86);
};

class BoundsCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86(HBoundsCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }

    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<Register>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<Register>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        __ shrl(length_loc.AsRegister<Register>(), Immediate(1));
      }
    }
    x86_codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};

class SuspendCheckSlowPathX86 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves full width XMM for SIMD.
    x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores full width XMM for SIMD.
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathX86"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};

class LoadStringSlowPathX86 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86(HLoadString* instruction): SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));
    x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    RestoreLiveRegisters(codegen, locations);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
};

class LoadClassSlowPathX86 : public SlowPathCode {
 public:
  LoadClassSlowPathX86(HLoadClass* cls, HInstruction* at)
      : SlowPathCode(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ movl(calling_convention.GetRegisterAt(0), Immediate(type_index.index_));
      x86_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), source);
    }
    if (must_do_clinit) {
      x86_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
    }
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathX86"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
};

class TypeCheckSlowPathX86 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<Register>());
    }

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(locations->InAt(0),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   DataType::Type::kReference,
                                   locations->InAt(1),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                                   DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_codegen->InvokeRuntime(kQuickCheckInstanceOf,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
      }
      RestoreLiveRegisters(codegen, locations);

      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathX86"; }
  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};

class DeoptimizationSlowPathX86 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->Load32BitValue(
        calling_convention.GetRegisterAt(0),
        static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    x86_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86);
};

class ArraySetSlowPathX86 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86(HInstruction* instruction,
                             Location ref,
                             bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathX86(HInstruction* instruction,
                                           Location ref,
                                           Register obj,
                                           const Address& field_addr,
                                           bool unpoison_ref_before_marking,
                                           Register temp)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkAndUpdateFieldSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp_, ref_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but that is OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save EAX beforehand, and move the
    // expected value (stored in `temp_`) into EAX.
    __ pushl(EAX);
    __ movl(EAX, temp_);

    // Convenience aliases.
    Register base = obj_;
    Register expected = EAX;
    Register value = ref_reg;

    bool base_equals_value = (base == value);
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value` to a temporary register. This way, poisoning
        // `value` won't invalidate `base`.
        value = temp_;
        __ movl(value, base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (EAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value, expected);
      DCHECK_NE(base, expected);

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(value);
    }

    __ LockCmpxchgl(field_addr_, value);

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(value);
      }
      // No need to unpoison `expected` (EAX), as it will be overwritten below.
    }

    // Restore EAX.
    __ popl(EAX);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86(HInstruction* instruction,
                                         Location out,
                                         Location ref,
                                         Location obj,
                                         uint32_t offset,
                                         Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86::X86Assembler::shll and
          // art::x86::X86Assembler::AddImmediate below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ movl(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(index_reg, Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(index_reg, Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(calling_convention.GetRegisterAt(2), Immediate(offset_));
    }
    x86_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForHeapReferenceSlowPathX86"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(GetAssembler())->  // NOLINT

inline Condition X86Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition and FP condition to x86 name.
inline Condition X86UnsignedOrFPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    // Signed to unsigned, and FP to x86 name.
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    // Unsigned remain unchanged.
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100962void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100963 stream << Register(reg);
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100964}
965
966void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100967 stream << XmmRegister(reg);
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100968}
969
Vladimir Markoa0431112018-06-25 09:32:54 +0100970const X86InstructionSetFeatures& CodeGeneratorX86::GetInstructionSetFeatures() const {
971 return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86InstructionSetFeatures();
972}
973
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100974size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
975 __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
976 return kX86WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100977}
978
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100979size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
980 __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
981 return kX86WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100982}
983
Mark Mendell7c8d0092015-01-26 11:21:33 -0500984size_t CodeGeneratorX86::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -0700985 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -0700986 __ movups(Address(ESP, stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -0700987 } else {
988 __ movsd(Address(ESP, stack_index), XmmRegister(reg_id));
989 }
Artem Serov6a0b6572019-07-26 20:38:37 +0100990 return GetSlowPathFPWidth();
Mark Mendell7c8d0092015-01-26 11:21:33 -0500991}
992
993size_t CodeGeneratorX86::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -0700994 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -0700995 __ movups(XmmRegister(reg_id), Address(ESP, stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -0700996 } else {
997 __ movsd(XmmRegister(reg_id), Address(ESP, stack_index));
998 }
Artem Serov6a0b6572019-07-26 20:38:37 +0100999 return GetSlowPathFPWidth();
Mark Mendell7c8d0092015-01-26 11:21:33 -05001000}
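// Note on the two FP spill helpers above: when the graph contains SIMD code the full
// 128-bit XMM register is spilled with movups; otherwise only the low 64 bits are
// spilled with movsd. The returned GetSlowPathFPWidth() tells the caller how many
// bytes of stack each spilled FP register occupies.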
1001
Calin Juravle175dc732015-08-25 15:42:32 +01001002void CodeGeneratorX86::InvokeRuntime(QuickEntrypointEnum entrypoint,
1003 HInstruction* instruction,
1004 uint32_t dex_pc,
1005 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001006 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001007 GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(entrypoint).Int32Value());
1008 if (EntrypointRequiresStackMap(entrypoint)) {
1009 RecordPcInfo(instruction, dex_pc, slow_path);
1010 }
Alexandre Rames8158f282015-08-07 10:26:17 +01001011}
1012
Roland Levillaindec8f632016-07-22 17:10:06 +01001013void CodeGeneratorX86::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1014 HInstruction* instruction,
1015 SlowPathCode* slow_path) {
1016 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001017 GenerateInvokeRuntime(entry_point_offset);
1018}
1019
1020void CodeGeneratorX86::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001021 __ fs()->call(Address::Absolute(entry_point_offset));
1022}
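// Note: on x86 the fs segment register addresses the current Thread, so the call above
// is a single "call fs:[entry_point_offset]" that reads the target out of the Thread's
// quick entrypoint table. A rough sketch of what InvokeRuntime() emits:
//   call fs:[<entrypoint offset>]   // jump to the runtime stub for the entrypoint
//   <stack map recorded here when the entrypoint requires one>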
1023
Mark Mendellfb8d2792015-03-31 22:16:59 -04001024CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001025 const CompilerOptions& compiler_options,
1026 OptimizingCompilerStats* stats)
Mark Mendell5f874182015-03-04 15:42:45 -05001027 : CodeGenerator(graph,
1028 kNumberOfCpuRegisters,
1029 kNumberOfXmmRegisters,
1030 kNumberOfRegisterPairs,
1031 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1032 arraysize(kCoreCalleeSaves))
1033 | (1 << kFakeReturnRegister),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001034 0,
1035 compiler_options,
1036 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001037 block_labels_(nullptr),
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001038 location_builder_(graph, this),
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001039 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001040 move_resolver_(graph->GetAllocator(), this),
1041 assembler_(graph->GetAllocator()),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001042 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1043 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1044 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1045 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001046 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001047 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko2d06e022019-07-08 15:45:19 +01001048 boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001049 jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1050 jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko93205e32016-04-13 11:59:46 +01001051 constant_area_start_(-1),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001052 fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001053 method_address_offset_(std::less<uint32_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001054 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001055 // Use a fake return address register to mimic Quick.
1056 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001057}
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001058
David Brazdil58282f42016-01-14 12:45:10 +00001059void CodeGeneratorX86::SetupBlockedRegisters() const {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001060 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001061 blocked_core_registers_[ESP] = true;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001062}
1063
Nicolas Geoffray4a34a422014-04-03 10:38:37 +01001064InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001065 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray4a34a422014-04-03 10:38:37 +01001066 assembler_(codegen->GetAssembler()),
1067 codegen_(codegen) {}
1068
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001069static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001070 return dwarf::Reg::X86Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001071}
1072
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001073void CodeGeneratorX86::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001074 __ cfi().SetCurrentCFAOffset(kX86WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001075 __ Bind(&frame_entry_label_);
Roland Levillain199f3362014-11-27 17:15:16 +00001076 bool skip_overflow_check =
1077 IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001078 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Calin Juravle93edf732015-01-20 20:14:07 +00001079
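  // Bump the method's hotness counter with a saturating increment: when the counter
  // already holds ArtMethod::MaxCounter() the addw is skipped, so the 16-bit counter
  // never wraps back to zero.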
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001080 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001081 NearLabel overflow;
1082 __ cmpw(Address(kMethodRegisterArgument,
1083 ArtMethod::HotnessCountOffset().Int32Value()),
1084 Immediate(ArtMethod::MaxCounter()));
1085 __ j(kEqual, &overflow);
1086 __ addw(Address(kMethodRegisterArgument,
1087 ArtMethod::HotnessCountOffset().Int32Value()),
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001088 Immediate(1));
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001089 __ Bind(&overflow);
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001090 }
1091
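  // Implicit stack overflow check: the testl below reads a word located
  // GetStackOverflowReservedBytes() bytes under ESP. If that address falls into the
  // stack guard page the read faults, and the runtime's fault handler converts the
  // fault into a StackOverflowError; RecordPcInfo() supplies the stack map used to
  // report it at this location.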
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001092 if (!skip_overflow_check) {
Vladimir Marko33bff252017-11-01 14:35:42 +00001093 size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86);
1094 __ testl(EAX, Address(ESP, -static_cast<int32_t>(reserved_bytes)));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001095 RecordPcInfo(nullptr, 0);
Nicolas Geoffray397f2e42014-07-23 12:57:19 +01001096 }
1097
Mark Mendell5f874182015-03-04 15:42:45 -05001098 if (HasEmptyFrame()) {
1099 return;
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001100 }
Mark Mendell5f874182015-03-04 15:42:45 -05001101
1102 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
1103 Register reg = kCoreCalleeSaves[i];
1104 if (allocated_registers_.ContainsCoreRegister(reg)) {
1105 __ pushl(reg);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001106 __ cfi().AdjustCFAOffset(kX86WordSize);
1107 __ cfi().RelOffset(DWARFReg(reg), 0);
Mark Mendell5f874182015-03-04 15:42:45 -05001108 }
1109 }
1110
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001111 int adjust = GetFrameSize() - FrameEntrySpillSize();
1112 __ subl(ESP, Immediate(adjust));
1113 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001114 // Save the current method if we need it. Note that we do not
1115 // do this in HCurrentMethod, as the instruction might have been removed
1116 // in the SSA graph.
1117 if (RequiresCurrentMethod()) {
1118 __ movl(Address(ESP, kCurrentMethodStackOffset), kMethodRegisterArgument);
1119 }
Nicolas Geoffrayf7893532017-06-15 12:34:36 +01001120
1121 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1122 // Initialize should_deoptimize flag to 0.
1123 __ movl(Address(ESP, GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
1124 }
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001125}
1126
1127void CodeGeneratorX86::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001128 __ cfi().RememberState();
1129 if (!HasEmptyFrame()) {
1130 int adjust = GetFrameSize() - FrameEntrySpillSize();
1131 __ addl(ESP, Immediate(adjust));
1132 __ cfi().AdjustCFAOffset(-adjust);
Mark Mendell5f874182015-03-04 15:42:45 -05001133
David Srbeckyc34dc932015-04-12 09:27:43 +01001134 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1135 Register reg = kCoreCalleeSaves[i];
1136 if (allocated_registers_.ContainsCoreRegister(reg)) {
1137 __ popl(reg);
1138 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86WordSize));
1139 __ cfi().Restore(DWARFReg(reg));
1140 }
Mark Mendell5f874182015-03-04 15:42:45 -05001141 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001142 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001143 __ ret();
1144 __ cfi().RestoreState();
1145 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001146}
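// Note on GenerateFrameExit() above: the CFI state is remembered before the epilogue
// and restored after the ret because a method may have several return points, and the
// unwind info for any code emitted after this block must still describe the full frame.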
1147
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001148void CodeGeneratorX86::Bind(HBasicBlock* block) {
1149 __ Bind(GetLabelOf(block));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001150}
1151
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001152Location InvokeDexCallingConventionVisitorX86::GetReturnLocation(DataType::Type type) const {
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001153 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001154 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001155 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001156 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001157 case DataType::Type::kInt8:
1158 case DataType::Type::kUint16:
1159 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -08001160 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001161 case DataType::Type::kInt32:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001162 return Location::RegisterLocation(EAX);
1163
Aart Bik66c158e2018-01-31 12:55:04 -08001164 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001165 case DataType::Type::kInt64:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001166 return Location::RegisterPairLocation(EAX, EDX);
1167
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001168 case DataType::Type::kVoid:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001169 return Location::NoLocation();
1170
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001171 case DataType::Type::kFloat64:
1172 case DataType::Type::kFloat32:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001173 return Location::FpuRegisterLocation(XMM0);
1174 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01001175
1176 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001177}
1178
1179Location InvokeDexCallingConventionVisitorX86::GetMethodLocation() const {
1180 return Location::RegisterLocation(kMethodRegisterArgument);
1181}
1182
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001183Location InvokeDexCallingConventionVisitorX86::GetNextLocation(DataType::Type type) {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001184 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001185 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001186 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001187 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001188 case DataType::Type::kInt8:
1189 case DataType::Type::kUint16:
1190 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001191 case DataType::Type::kInt32: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001192 uint32_t index = gp_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001193 stack_index_++;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001194 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001195 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001196 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001197 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001198 }
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001199 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001200
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001201 case DataType::Type::kInt64: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001202 uint32_t index = gp_index_;
1203 gp_index_ += 2;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001204 stack_index_ += 2;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001205 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001206 X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
1207 calling_convention.GetRegisterPairAt(index));
1208 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001209 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001210 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
1211 }
1212 }
1213
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001214 case DataType::Type::kFloat32: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001215 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001216 stack_index_++;
1217 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1218 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1219 } else {
1220 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
1221 }
1222 }
1223
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001224 case DataType::Type::kFloat64: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001225 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001226 stack_index_ += 2;
1227 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1228 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1229 } else {
1230 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001231 }
1232 }
1233
Aart Bik66c158e2018-01-31 12:55:04 -08001234 case DataType::Type::kUint32:
1235 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001236 case DataType::Type::kVoid:
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001237 LOG(FATAL) << "Unexpected parameter type " << type;
Elliott Hughesc1896c92018-11-29 11:33:18 -08001238 UNREACHABLE();
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001239 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00001240 return Location::NoLocation();
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001241}
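// Note on GetNextLocation() above: gp_index_ and float_index_ count how many core/FP
// argument registers have been handed out, while stack_index_ tracks stack slots
// separately, because a long or double always consumes two stack slots in the managed
// calling convention even when the value itself was assigned to registers. That is why
// the stack offsets are computed from stack_index_ rather than from the register indices.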
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001242
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001243void CodeGeneratorX86::Move32(Location destination, Location source) {
1244 if (source.Equals(destination)) {
1245 return;
1246 }
1247 if (destination.IsRegister()) {
1248 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001249 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001250 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001251 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001252 } else {
1253 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001254 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001255 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001256 } else if (destination.IsFpuRegister()) {
1257 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001258 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001259 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001260 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001261 } else {
1262 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001263 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001264 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001265 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001266 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001267 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001268 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001269 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001270 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05001271 } else if (source.IsConstant()) {
1272 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001273 int32_t value = GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05001274 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001275 } else {
1276 DCHECK(source.IsStackSlot());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001277 __ pushl(Address(ESP, source.GetStackIndex()));
1278 __ popl(Address(ESP, destination.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001279 }
1280 }
1281}
1282
1283void CodeGeneratorX86::Move64(Location destination, Location source) {
1284 if (source.Equals(destination)) {
1285 return;
1286 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001287 if (destination.IsRegisterPair()) {
1288 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001289 EmitParallelMoves(
1290 Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
1291 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001292 DataType::Type::kInt32,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001293 Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001294 Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001295 DataType::Type::kInt32);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001296 } else if (source.IsFpuRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001297 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
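      // Copy the low 32 bits with movd, shift the upper half of the XMM register down
      // into the low lane, then copy those 32 bits as well. Note that the psrlq below
      // modifies src_reg in place.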
1298 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
1299 __ psrlq(src_reg, Immediate(32));
1300 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001301 } else {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001302 // No conflict possible, so just do the moves.
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001303 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001304 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
1305 __ movl(destination.AsRegisterPairHigh<Register>(),
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001306 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
1307 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001308 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05001309 if (source.IsFpuRegister()) {
1310 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
1311 } else if (source.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001312 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001313 } else if (source.IsRegisterPair()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001314 size_t elem_size = DataType::Size(DataType::Type::kInt32);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001315 // Create stack space for 2 elements.
1316 __ subl(ESP, Immediate(2 * elem_size));
1317 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
1318 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
1319 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
1320 // And remove the temporary stack space we allocated.
1321 __ addl(ESP, Immediate(2 * elem_size));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001322 } else {
1323 LOG(FATAL) << "Unimplemented";
1324 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001325 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001326 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001327 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001328 // No conflict possible, so just do the moves.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001329 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001330 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001331 source.AsRegisterPairHigh<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001332 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001333 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001334 } else if (source.IsConstant()) {
1335 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001336 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1337 int64_t value = GetInt64ValueOf(constant);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001338 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(Low32Bits(value)));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001339 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
1340 Immediate(High32Bits(value)));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001341 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001342 DCHECK(source.IsDoubleStackSlot()) << source;
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001343 EmitParallelMoves(
1344 Location::StackSlot(source.GetStackIndex()),
1345 Location::StackSlot(destination.GetStackIndex()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001346 DataType::Type::kInt32,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001347 Location::StackSlot(source.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001348 Location::StackSlot(destination.GetHighStackIndex(kX86WordSize)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001349 DataType::Type::kInt32);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001350 }
1351 }
1352}
1353
Calin Juravle175dc732015-08-25 15:42:32 +01001354void CodeGeneratorX86::MoveConstant(Location location, int32_t value) {
1355 DCHECK(location.IsRegister());
1356 __ movl(location.AsRegister<Register>(), Immediate(value));
1357}
1358
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001359void CodeGeneratorX86::MoveLocation(Location dst, Location src, DataType::Type dst_type) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001360 HParallelMove move(GetGraph()->GetAllocator());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001361 if (dst_type == DataType::Type::kInt64 && !src.IsConstant() && !src.IsFpuRegister()) {
1362 move.AddMove(src.ToLow(), dst.ToLow(), DataType::Type::kInt32, nullptr);
1363 move.AddMove(src.ToHigh(), dst.ToHigh(), DataType::Type::kInt32, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001364 } else {
David Brazdil74eb1b22015-12-14 11:44:01 +00001365 move.AddMove(src, dst, dst_type, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001366 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001367 GetMoveResolver()->EmitNativeCode(&move);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001368}
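// Note on MoveLocation() above: a 64-bit move between core locations is handed to the
// parallel move resolver as two independent 32-bit moves so that the resolver can order
// (or swap) the halves when the source and destination pairs overlap.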
1369
1370void CodeGeneratorX86::AddLocationAsTemp(Location location, LocationSummary* locations) {
1371 if (location.IsRegister()) {
1372 locations->AddTemp(location);
1373 } else if (location.IsRegisterPair()) {
1374 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1375 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
1376 } else {
1377 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1378 }
1379}
1380
David Brazdilfc6a86a2015-06-26 10:33:45 +00001381void InstructionCodeGeneratorX86::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08001382 if (successor->IsExitBlock()) {
1383 DCHECK(got->GetPrevious()->AlwaysThrows());
1384 return; // no code needed
1385 }
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001386
1387 HBasicBlock* block = got->GetBlock();
1388 HInstruction* previous = got->GetPrevious();
1389
1390 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001391 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001392 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
1393 __ pushl(EAX);
1394 __ movl(EAX, Address(ESP, kX86WordSize));
Mathieu Chartier7f8678e2019-08-30 16:22:28 -07001395 NearLabel overflow;
1396 __ cmpw(Address(EAX, ArtMethod::HotnessCountOffset().Int32Value()),
1397 Immediate(ArtMethod::MaxCounter()));
1398 __ j(kEqual, &overflow);
1399 __ addw(Address(EAX, ArtMethod::HotnessCountOffset().Int32Value()),
1400 Immediate(1));
1401 __ Bind(&overflow);
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001402 __ popl(EAX);
1403 }
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001404 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1405 return;
1406 }
1407
1408 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1409 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1410 }
1411 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001412 __ jmp(codegen_->GetLabelOf(successor));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001413 }
1414}
1415
David Brazdilfc6a86a2015-06-26 10:33:45 +00001416void LocationsBuilderX86::VisitGoto(HGoto* got) {
1417 got->SetLocations(nullptr);
1418}
1419
1420void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
1421 HandleGoto(got, got->GetSuccessor());
1422}
1423
1424void LocationsBuilderX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1425 try_boundary->SetLocations(nullptr);
1426}
1427
1428void InstructionCodeGeneratorX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1429 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1430 if (!successor->IsExitBlock()) {
1431 HandleGoto(try_boundary, successor);
1432 }
1433}
1434
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001435void LocationsBuilderX86::VisitExit(HExit* exit) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001436 exit->SetLocations(nullptr);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001437}
1438
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001439void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001440}
1441
Mark Mendell152408f2015-12-31 12:28:50 -05001442template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001443void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001444 LabelType* true_label,
1445 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001446 if (cond->IsFPConditionTrueIfNaN()) {
1447 __ j(kUnordered, true_label);
1448 } else if (cond->IsFPConditionFalseIfNaN()) {
1449 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001450 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001451 __ j(X86UnsignedOrFPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001452}
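// Note on GenerateFPJumps() above: ucomiss/ucomisd report "unordered" when either
// operand is NaN, so a condition whose result must be true for NaN inputs (e.g. !=)
// jumps straight to the true label on kUnordered, and one that must be false for NaN
// jumps to the false label, before the ordered condition itself is tested.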
1453
Mark Mendell152408f2015-12-31 12:28:50 -05001454template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001455void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001456 LabelType* true_label,
1457 LabelType* false_label) {
Mark Mendellc4701932015-04-10 13:18:51 -04001458 LocationSummary* locations = cond->GetLocations();
1459 Location left = locations->InAt(0);
1460 Location right = locations->InAt(1);
1461 IfCondition if_cond = cond->GetCondition();
1462
Mark Mendellc4701932015-04-10 13:18:51 -04001463 Register left_high = left.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001464 Register left_low = left.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001465 IfCondition true_high_cond = if_cond;
1466 IfCondition false_high_cond = cond->GetOppositeCondition();
Aart Bike9f37602015-10-09 11:15:55 -07001467  Condition final_condition = X86UnsignedOrFPCondition(if_cond);  // the low-word compare is unsigned
Mark Mendellc4701932015-04-10 13:18:51 -04001468
1469 // Set the conditions for the test, remembering that == needs to be
1470 // decided using the low words.
1471 switch (if_cond) {
1472 case kCondEQ:
Mark Mendellc4701932015-04-10 13:18:51 -04001473 case kCondNE:
Roland Levillain4fa13f62015-07-06 18:11:54 +01001474 // Nothing to do.
Mark Mendellc4701932015-04-10 13:18:51 -04001475 break;
1476 case kCondLT:
1477 false_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001478 break;
1479 case kCondLE:
1480 true_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001481 break;
1482 case kCondGT:
1483 false_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001484 break;
1485 case kCondGE:
1486 true_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001487 break;
Aart Bike9f37602015-10-09 11:15:55 -07001488 case kCondB:
1489 false_high_cond = kCondA;
1490 break;
1491 case kCondBE:
1492 true_high_cond = kCondB;
1493 break;
1494 case kCondA:
1495 false_high_cond = kCondB;
1496 break;
1497 case kCondAE:
1498 true_high_cond = kCondA;
1499 break;
Mark Mendellc4701932015-04-10 13:18:51 -04001500 }
1501
1502 if (right.IsConstant()) {
1503 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellc4701932015-04-10 13:18:51 -04001504 int32_t val_high = High32Bits(value);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001505 int32_t val_low = Low32Bits(value);
Mark Mendellc4701932015-04-10 13:18:51 -04001506
Aart Bika19616e2016-02-01 18:57:58 -08001507 codegen_->Compare32BitValue(left_high, val_high);
Mark Mendellc4701932015-04-10 13:18:51 -04001508 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001509 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001510 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001511 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001512 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001513 __ j(X86Condition(true_high_cond), true_label);
1514 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001515 }
1516 // Must be equal high, so compare the lows.
Aart Bika19616e2016-02-01 18:57:58 -08001517 codegen_->Compare32BitValue(left_low, val_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001518 } else if (right.IsRegisterPair()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001519 Register right_high = right.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001520 Register right_low = right.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001521
1522 __ cmpl(left_high, right_high);
1523 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001524 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001525 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001526 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001527 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001528 __ j(X86Condition(true_high_cond), true_label);
1529 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001530 }
1531 // Must be equal high, so compare the lows.
1532 __ cmpl(left_low, right_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001533 } else {
1534 DCHECK(right.IsDoubleStackSlot());
1535 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1536 if (if_cond == kCondNE) {
1537 __ j(X86Condition(true_high_cond), true_label);
1538 } else if (if_cond == kCondEQ) {
1539 __ j(X86Condition(false_high_cond), false_label);
1540 } else {
1541 __ j(X86Condition(true_high_cond), true_label);
1542 __ j(X86Condition(false_high_cond), false_label);
1543 }
1544 // Must be equal high, so compare the lows.
1545 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Mark Mendellc4701932015-04-10 13:18:51 -04001546 }
1547 // The last comparison might be unsigned.
1548 __ j(final_condition, true_label);
1549}
1550
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001551void InstructionCodeGeneratorX86::GenerateFPCompare(Location lhs,
1552 Location rhs,
1553 HInstruction* insn,
1554 bool is_double) {
1555 HX86LoadFromConstantTable* const_area = insn->InputAt(1)->AsX86LoadFromConstantTable();
1556 if (is_double) {
1557 if (rhs.IsFpuRegister()) {
1558 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1559 } else if (const_area != nullptr) {
1560 DCHECK(const_area->IsEmittedAtUseSite());
1561 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(),
1562 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001563 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
1564 const_area->GetBaseMethodAddress(),
1565 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001566 } else {
1567 DCHECK(rhs.IsDoubleStackSlot());
1568 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1569 }
1570 } else {
1571 if (rhs.IsFpuRegister()) {
1572 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1573 } else if (const_area != nullptr) {
1574 DCHECK(const_area->IsEmittedAtUseSite());
1575 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(),
1576 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001577 const_area->GetConstant()->AsFloatConstant()->GetValue(),
1578 const_area->GetBaseMethodAddress(),
1579 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001580 } else {
1581 DCHECK(rhs.IsStackSlot());
1582 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1583 }
1584 }
1585}
1586
Mark Mendell152408f2015-12-31 12:28:50 -05001587template<class LabelType>
David Brazdil0debae72015-11-12 18:37:00 +00001588void InstructionCodeGeneratorX86::GenerateCompareTestAndBranch(HCondition* condition,
Mark Mendell152408f2015-12-31 12:28:50 -05001589 LabelType* true_target_in,
1590 LabelType* false_target_in) {
David Brazdil0debae72015-11-12 18:37:00 +00001591 // Generated branching requires both targets to be explicit. If either of the
1592 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Mark Mendell152408f2015-12-31 12:28:50 -05001593 LabelType fallthrough_target;
1594 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1595 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
David Brazdil0debae72015-11-12 18:37:00 +00001596
Mark Mendellc4701932015-04-10 13:18:51 -04001597 LocationSummary* locations = condition->GetLocations();
1598 Location left = locations->InAt(0);
1599 Location right = locations->InAt(1);
1600
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001601 DataType::Type type = condition->InputAt(0)->GetType();
Mark Mendellc4701932015-04-10 13:18:51 -04001602 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001603 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04001604 GenerateLongComparesAndJumps(condition, true_target, false_target);
1605 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001606 case DataType::Type::kFloat32:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001607 GenerateFPCompare(left, right, condition, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001608 GenerateFPJumps(condition, true_target, false_target);
1609 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001610 case DataType::Type::kFloat64:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001611 GenerateFPCompare(left, right, condition, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001612 GenerateFPJumps(condition, true_target, false_target);
1613 break;
1614 default:
1615 LOG(FATAL) << "Unexpected compare type " << type;
1616 }
1617
David Brazdil0debae72015-11-12 18:37:00 +00001618 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001619 __ jmp(false_target);
1620 }
David Brazdil0debae72015-11-12 18:37:00 +00001621
1622 if (fallthrough_target.IsLinked()) {
1623 __ Bind(&fallthrough_target);
1624 }
Mark Mendellc4701932015-04-10 13:18:51 -04001625}
1626
David Brazdil0debae72015-11-12 18:37:00 +00001627static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1628 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1629 // are set only strictly before `branch`. We can't use the eflags on long/FP
1630 // conditions if they are materialized due to the complex branching.
1631 return cond->IsCondition() &&
1632 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001633 cond->InputAt(0)->GetType() != DataType::Type::kInt64 &&
1634 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001635}
1636
Mark Mendell152408f2015-12-31 12:28:50 -05001637template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001638void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001639 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001640 LabelType* true_target,
1641 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001642 HInstruction* cond = instruction->InputAt(condition_input_index);
1643
1644 if (true_target == nullptr && false_target == nullptr) {
1645 // Nothing to do. The code always falls through.
1646 return;
1647 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001648 // Constant condition, statically compared against "true" (integer value 1).
1649 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001650 if (true_target != nullptr) {
1651 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001652 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001653 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001654 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001655 if (false_target != nullptr) {
1656 __ jmp(false_target);
1657 }
1658 }
1659 return;
1660 }
1661
1662 // The following code generates these patterns:
1663 // (1) true_target == nullptr && false_target != nullptr
1664 // - opposite condition true => branch to false_target
1665 // (2) true_target != nullptr && false_target == nullptr
1666 // - condition true => branch to true_target
1667 // (3) true_target != nullptr && false_target != nullptr
1668 // - condition true => branch to true_target
1669 // - branch to false_target
1670 if (IsBooleanValueOrMaterializedCondition(cond)) {
1671 if (AreEflagsSetFrom(cond, instruction)) {
1672 if (true_target == nullptr) {
1673 __ j(X86Condition(cond->AsCondition()->GetOppositeCondition()), false_target);
1674 } else {
1675 __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
1676 }
1677 } else {
1678 // Materialized condition, compare against 0.
1679 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1680 if (lhs.IsRegister()) {
1681 __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>());
1682 } else {
1683 __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
1684 }
1685 if (true_target == nullptr) {
1686 __ j(kEqual, false_target);
1687 } else {
1688 __ j(kNotEqual, true_target);
1689 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001690 }
1691 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001692 // Condition has not been materialized, use its inputs as the comparison and
1693 // its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001694 HCondition* condition = cond->AsCondition();
David Brazdil0debae72015-11-12 18:37:00 +00001695
1696 // If this is a long or FP comparison that has been folded into
1697 // the HCondition, generate the comparison directly.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001698 DataType::Type type = condition->InputAt(0)->GetType();
1699 if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
David Brazdil0debae72015-11-12 18:37:00 +00001700 GenerateCompareTestAndBranch(condition, true_target, false_target);
1701 return;
1702 }
1703
1704 Location lhs = condition->GetLocations()->InAt(0);
1705 Location rhs = condition->GetLocations()->InAt(1);
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001706 // LHS is guaranteed to be in a register (see LocationsBuilderX86::HandleCondition).
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001707 codegen_->GenerateIntCompare(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00001708 if (true_target == nullptr) {
1709 __ j(X86Condition(condition->GetOppositeCondition()), false_target);
1710 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001711 __ j(X86Condition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001712 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001713 }
David Brazdil0debae72015-11-12 18:37:00 +00001714
1715 // If neither branch falls through (case 3), the conditional branch to `true_target`
1716 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1717 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001718 __ jmp(false_target);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001719 }
1720}
1721
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001722void LocationsBuilderX86::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001723 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00001724 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001725 locations->SetInAt(0, Location::Any());
1726 }
1727}
1728
1729void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001730 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1731 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1732 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1733 nullptr : codegen_->GetLabelOf(true_successor);
1734 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1735 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08001736 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001737}
1738
1739void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001740 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001741 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01001742 InvokeRuntimeCallingConvention calling_convention;
1743 RegisterSet caller_saves = RegisterSet::Empty();
1744 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1745 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00001746 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001747 locations->SetInAt(0, Location::Any());
1748 }
1749}
1750
1751void InstructionCodeGeneratorX86::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001752 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001753 GenerateTestAndBranch<Label>(deoptimize,
Andreas Gampe3db70682018-12-26 15:12:03 -08001754 /* condition_input_index= */ 0,
David Brazdil74eb1b22015-12-14 11:44:01 +00001755 slow_path->GetEntryLabel(),
Andreas Gampe3db70682018-12-26 15:12:03 -08001756 /* false_target= */ nullptr);
David Brazdil74eb1b22015-12-14 11:44:01 +00001757}
1758
Mingyao Yang063fc772016-08-02 11:02:54 -07001759void LocationsBuilderX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001760 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07001761 LocationSummary(flag, LocationSummary::kNoCall);
1762 locations->SetOut(Location::RequiresRegister());
1763}
1764
1765void InstructionCodeGeneratorX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1766 __ movl(flag->GetLocations()->Out().AsRegister<Register>(),
1767 Address(ESP, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
1768}
1769
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001770static bool SelectCanUseCMOV(HSelect* select) {
1771 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001772 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001773 return false;
1774 }
1775
1776  // An FP condition doesn't generate the single CC that we need.
1777 // In 32 bit mode, a long condition doesn't generate a single CC either.
1778 HInstruction* condition = select->GetCondition();
1779 if (condition->IsCondition()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001780 DataType::Type compare_type = condition->InputAt(0)->GetType();
1781 if (compare_type == DataType::Type::kInt64 ||
1782 DataType::IsFloatingPointType(compare_type)) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001783 return false;
1784 }
1785 }
1786
1787 // We can generate a CMOV for this Select.
1788 return true;
1789}
1790
David Brazdil74eb1b22015-12-14 11:44:01 +00001791void LocationsBuilderX86::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001792 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001793 if (DataType::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001794 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001795 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001796 } else {
1797 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001798 if (SelectCanUseCMOV(select)) {
1799 if (select->InputAt(1)->IsConstant()) {
1800 // Cmov can't handle a constant value.
1801 locations->SetInAt(1, Location::RequiresRegister());
1802 } else {
1803 locations->SetInAt(1, Location::Any());
1804 }
1805 } else {
1806 locations->SetInAt(1, Location::Any());
1807 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001808 }
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001809 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1810 locations->SetInAt(2, Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00001811 }
1812 locations->SetOut(Location::SameAsFirstInput());
1813}
1814
1815void InstructionCodeGeneratorX86::VisitSelect(HSelect* select) {
1816 LocationSummary* locations = select->GetLocations();
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001817 DCHECK(locations->InAt(0).Equals(locations->Out()));
1818 if (SelectCanUseCMOV(select)) {
1819 // If both the condition and the source types are integer, we can generate
1820 // a CMOV to implement Select.
1821
1822 HInstruction* select_condition = select->GetCondition();
1823 Condition cond = kNotEqual;
1824
1825 // Figure out how to test the 'condition'.
1826 if (select_condition->IsCondition()) {
1827 HCondition* condition = select_condition->AsCondition();
1828 if (!condition->IsEmittedAtUseSite()) {
1829 // This was a previously materialized condition.
1830 // Can we use the existing condition code?
1831 if (AreEflagsSetFrom(condition, select)) {
1832 // Materialization was the previous instruction. Condition codes are right.
1833 cond = X86Condition(condition->GetCondition());
1834 } else {
1835 // No, we have to recreate the condition code.
1836 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1837 __ testl(cond_reg, cond_reg);
1838 }
1839 } else {
1840 // We can't handle FP or long here.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001841 DCHECK_NE(condition->InputAt(0)->GetType(), DataType::Type::kInt64);
1842 DCHECK(!DataType::IsFloatingPointType(condition->InputAt(0)->GetType()));
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001843 LocationSummary* cond_locations = condition->GetLocations();
Roland Levillain0b671c02016-08-19 12:02:34 +01001844 codegen_->GenerateIntCompare(cond_locations->InAt(0), cond_locations->InAt(1));
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001845 cond = X86Condition(condition->GetCondition());
1846 }
1847 } else {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001848 // Must be a Boolean condition, which needs to be compared to 0.
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001849 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1850 __ testl(cond_reg, cond_reg);
1851 }
1852
1853 // If the condition is true, overwrite the output, which already contains false.
1854 Location false_loc = locations->InAt(0);
1855 Location true_loc = locations->InAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001856 if (select->GetType() == DataType::Type::kInt64) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001857 // 64 bit conditional move.
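      // cmovl does not modify the flags, so both halves of the pair can test the same
      // condition back to back.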
1858 Register false_high = false_loc.AsRegisterPairHigh<Register>();
1859 Register false_low = false_loc.AsRegisterPairLow<Register>();
1860 if (true_loc.IsRegisterPair()) {
1861 __ cmovl(cond, false_high, true_loc.AsRegisterPairHigh<Register>());
1862 __ cmovl(cond, false_low, true_loc.AsRegisterPairLow<Register>());
1863 } else {
1864 __ cmovl(cond, false_high, Address(ESP, true_loc.GetHighStackIndex(kX86WordSize)));
1865 __ cmovl(cond, false_low, Address(ESP, true_loc.GetStackIndex()));
1866 }
1867 } else {
1868 // 32 bit conditional move.
1869 Register false_reg = false_loc.AsRegister<Register>();
1870 if (true_loc.IsRegister()) {
1871 __ cmovl(cond, false_reg, true_loc.AsRegister<Register>());
1872 } else {
1873 __ cmovl(cond, false_reg, Address(ESP, true_loc.GetStackIndex()));
1874 }
1875 }
1876 } else {
1877 NearLabel false_target;
1878 GenerateTestAndBranch<NearLabel>(
Andreas Gampe3db70682018-12-26 15:12:03 -08001879 select, /* condition_input_index= */ 2, /* true_target= */ nullptr, &false_target);
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001880 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1881 __ Bind(&false_target);
1882 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001883}
1884
David Srbecky0cf44932015-12-09 14:09:59 +00001885void LocationsBuilderX86::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001886 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00001887}
1888
David Srbeckyd28f4a02016-03-14 17:14:24 +00001889void InstructionCodeGeneratorX86::VisitNativeDebugInfo(HNativeDebugInfo*) {
1890 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001891}
1892
1893void CodeGeneratorX86::GenerateNop() {
1894 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001895}
1896
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001897void LocationsBuilderX86::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001898 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001899 new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001900 // Handle the long/FP comparisons made in instruction simplification.
1901 switch (cond->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001902 case DataType::Type::kInt64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001903 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell8659e842016-02-16 10:41:46 -05001904 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001905 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001906 locations->SetOut(Location::RequiresRegister());
1907 }
1908 break;
1909 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001910 case DataType::Type::kFloat32:
1911 case DataType::Type::kFloat64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001912 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001913 if (cond->InputAt(1)->IsX86LoadFromConstantTable()) {
1914 DCHECK(cond->InputAt(1)->IsEmittedAtUseSite());
1915 } else if (cond->InputAt(1)->IsConstant()) {
1916 locations->SetInAt(1, Location::RequiresFpuRegister());
1917 } else {
1918 locations->SetInAt(1, Location::Any());
1919 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001920 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001921 locations->SetOut(Location::RequiresRegister());
1922 }
1923 break;
1924 }
1925 default:
1926 locations->SetInAt(0, Location::RequiresRegister());
1927 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001928 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001929 // We need a byte register.
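      // (setcc can only write the byte-addressable registers AL/CL/DL/BL on x86-32,
      // which is why the output is pinned to ECX here.)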
1930 locations->SetOut(Location::RegisterLocation(ECX));
1931 }
1932 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001933 }
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00001934}
1935
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001936void InstructionCodeGeneratorX86::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001937 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001938 return;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001939 }
Mark Mendellc4701932015-04-10 13:18:51 -04001940
1941 LocationSummary* locations = cond->GetLocations();
1942 Location lhs = locations->InAt(0);
1943 Location rhs = locations->InAt(1);
1944 Register reg = locations->Out().AsRegister<Register>();
Mark Mendell152408f2015-12-31 12:28:50 -05001945 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001946
1947 switch (cond->InputAt(0)->GetType()) {
1948 default: {
1949 // Integer case.
1950
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01001951 // Clear output register: setb only sets the low byte.
Mark Mendellc4701932015-04-10 13:18:51 -04001952 __ xorl(reg, reg);
Roland Levillain0b671c02016-08-19 12:02:34 +01001953 codegen_->GenerateIntCompare(lhs, rhs);
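      // The setb() below takes an explicit condition, so it emits the setcc variant
      // matching cond rather than an unconditional "set if below".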
Aart Bike9f37602015-10-09 11:15:55 -07001954 __ setb(X86Condition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001955 return;
1956 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001957 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04001958 GenerateLongComparesAndJumps(cond, &true_label, &false_label);
1959 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001960 case DataType::Type::kFloat32:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001961 GenerateFPCompare(lhs, rhs, cond, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001962 GenerateFPJumps(cond, &true_label, &false_label);
1963 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001964 case DataType::Type::kFloat64:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001965 GenerateFPCompare(lhs, rhs, cond, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001966 GenerateFPJumps(cond, &true_label, &false_label);
1967 break;
1968 }
1969
1970 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001971 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001972
Roland Levillain4fa13f62015-07-06 18:11:54 +01001973 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001974 __ Bind(&false_label);
1975 __ xorl(reg, reg);
1976 __ jmp(&done_label);
1977
Roland Levillain4fa13f62015-07-06 18:11:54 +01001978 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001979 __ Bind(&true_label);
1980 __ movl(reg, Immediate(1));
1981 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001982}
1983
1984void LocationsBuilderX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001985 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001986}
1987
1988void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001989 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001990}
1991
1992void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001993 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001994}
1995
1996void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001997 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001998}
1999
2000void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002001 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002002}
2003
2004void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002005 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002006}
2007
2008void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002009 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002010}
2011
2012void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002013 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002014}
2015
2016void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002017 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002018}
2019
2020void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002021 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002022}
2023
2024void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002025 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002026}
2027
2028void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002029 HandleCondition(comp);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002030}
2031
Aart Bike9f37602015-10-09 11:15:55 -07002032void LocationsBuilderX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002033 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002034}
2035
2036void InstructionCodeGeneratorX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002037 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002038}
2039
2040void LocationsBuilderX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002041 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002042}
2043
2044void InstructionCodeGeneratorX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002045 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002046}
2047
2048void LocationsBuilderX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002049 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002050}
2051
2052void InstructionCodeGeneratorX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002053 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002054}
2055
2056void LocationsBuilderX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002057 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002058}
2059
2060void InstructionCodeGeneratorX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002061 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002062}
2063
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002064void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002065 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002066 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002067 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002068}
2069
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002070void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002071 // Will be generated at use site.
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002072}
2073
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002074void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
2075 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002076 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002077 locations->SetOut(Location::ConstantLocation(constant));
2078}
2079
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002080void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002081 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002082}
2083
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002084void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002085 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002086 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002087 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002088}
2089
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002090void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002091 // Will be generated at use site.
2092}
2093
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002094void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
2095 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002096 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002097 locations->SetOut(Location::ConstantLocation(constant));
2098}
2099
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002100void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002101 // Will be generated at use site.
2102}
2103
2104void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
2105 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002106 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002107 locations->SetOut(Location::ConstantLocation(constant));
2108}
2109
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002110void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002111 // Will be generated at use site.
2112}
2113
Igor Murashkind01745e2017-04-05 16:40:31 -07002114void LocationsBuilderX86::VisitConstructorFence(HConstructorFence* constructor_fence) {
2115 constructor_fence->SetLocations(nullptr);
2116}
2117
2118void InstructionCodeGeneratorX86::VisitConstructorFence(
2119 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
2120 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
2121}
2122
Calin Juravle27df7582015-04-17 19:12:31 +01002123void LocationsBuilderX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2124 memory_barrier->SetLocations(nullptr);
2125}
2126
2127void InstructionCodeGeneratorX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002128 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002129}
2130
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002131void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002132 ret->SetLocations(nullptr);
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002133}
2134
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002135void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002136 codegen_->GenerateFrameExit();
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002137}
2138
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002139void LocationsBuilderX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002140 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002141 new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002142 switch (ret->InputAt(0)->GetType()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002143 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002144 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002145 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002146 case DataType::Type::kInt8:
2147 case DataType::Type::kUint16:
2148 case DataType::Type::kInt16:
2149 case DataType::Type::kInt32:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002150 locations->SetInAt(0, Location::RegisterLocation(EAX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002151 break;
2152
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002153 case DataType::Type::kInt64:
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002154 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002155 0, Location::RegisterPairLocation(EAX, EDX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002156 break;
2157
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002158 case DataType::Type::kFloat32:
2159 case DataType::Type::kFloat64:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002160 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002161 0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002162 break;
2163
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002164 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002165 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002166 }
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002167}
2168
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002169void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002170 if (kIsDebugBuild) {
2171 switch (ret->InputAt(0)->GetType()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002172 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002173 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002174 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002175 case DataType::Type::kInt8:
2176 case DataType::Type::kUint16:
2177 case DataType::Type::kInt16:
2178 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002179 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<Register>(), EAX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002180 break;
2181
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002182 case DataType::Type::kInt64:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002183 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
2184 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002185 break;
2186
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002187 case DataType::Type::kFloat32:
2188 case DataType::Type::kFloat64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002189 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>(), XMM0);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002190 break;
2191
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002192 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002193 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002194 }
2195 }
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002196 codegen_->GenerateFrameExit();
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002197}
2198
Calin Juravle175dc732015-08-25 15:42:32 +01002199void LocationsBuilderX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
 2200  // The trampoline uses the same calling convention as dex calls, except that
 2201  // instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
 2202  // the method_idx.
2203 HandleInvoke(invoke);
2204}
2205
2206void InstructionCodeGeneratorX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2207 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2208}
2209
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002210void LocationsBuilderX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002211 // Explicit clinit checks triggered by static invokes must have been pruned by
2212 // art::PrepareForRegisterAllocation.
2213 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002214
Mark Mendellfb8d2792015-03-31 22:16:59 -04002215 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002216 if (intrinsic.TryDispatch(invoke)) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01002217 if (invoke->GetLocations()->CanCall() &&
2218 invoke->HasPcRelativeMethodLoadKind() &&
2219 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).IsInvalid()) {
Vladimir Markoc53c0792015-11-19 15:48:33 +00002220 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002221 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04002222 return;
2223 }
2224
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002225 HandleInvoke(invoke);
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002226
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002227  // For PC-relative method load kinds the invoke has an extra input, the PC-relative address base.
Vladimir Marko65979462017-05-19 17:25:12 +01002228 if (invoke->HasPcRelativeMethodLoadKind()) {
Vladimir Markob4536b72015-11-24 13:45:23 +00002229 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002230 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002231}
2232
Mark Mendell09ed1a32015-03-25 08:30:06 -04002233static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86* codegen) {
2234 if (invoke->GetLocations()->Intrinsified()) {
2235 IntrinsicCodeGeneratorX86 intrinsic(codegen);
2236 intrinsic.Dispatch(invoke);
2237 return true;
2238 }
2239 return false;
2240}
2241
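// If an intrinsic implementation was matched when building locations, emit it here
// and skip the regular static/direct call sequence below.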
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002242void InstructionCodeGeneratorX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002243 // Explicit clinit checks triggered by static invokes must have been pruned by
2244 // art::PrepareForRegisterAllocation.
2245 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002246
Mark Mendell09ed1a32015-03-25 08:30:06 -04002247 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2248 return;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002249 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002250
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002251 LocationSummary* locations = invoke->GetLocations();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002252 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002253 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002254}
2255
2256void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00002257 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
2258 if (intrinsic.TryDispatch(invoke)) {
2259 return;
2260 }
2261
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002262 HandleInvoke(invoke);
2263}
2264
2265void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002266 InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002267 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002268}
2269
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002270void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002271 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2272 return;
2273 }
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002274
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002275 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002276 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002277}
2278
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002279void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002280 // This call to HandleInvoke allocates a temporary (core) register
2281 // which is also used to transfer the hidden argument from FP to
2282 // core register.
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002283 HandleInvoke(invoke);
2284 // Add the hidden argument.
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002285 invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM7));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002286}
2287
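// Interface dispatch sketch (matching the code below): the invoke's dex method index
// is passed as a hidden argument in XMM7, the receiver's class is loaded (and possibly
// unpoisoned), the IMT pointer is read from the class, the ImTable entry for the
// invoke's IMT index is loaded, and its entry point is called.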
2288void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
2289 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002290 LocationSummary* locations = invoke->GetLocations();
2291 Register temp = locations->GetTemp(0).AsRegister<Register>();
2292 XmmRegister hidden_reg = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002293 Location receiver = locations->InAt(0);
2294 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2295
Roland Levillain0d5a2812015-11-13 10:07:31 +00002296  // Set the hidden argument. It is safe to do this here, as XMM7
2297 // won't be modified thereafter, before the `call` instruction.
2298 DCHECK_EQ(XMM7, hidden_reg);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002299 __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002300 __ movd(hidden_reg, temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002301
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002302 if (receiver.IsStackSlot()) {
2303 __ movl(temp, Address(ESP, receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002304 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002305 __ movl(temp, Address(temp, class_offset));
2306 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002307 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002308 __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002309 }
Roland Levillain4d027112015-07-01 15:41:14 +01002310 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002311 // Instead of simply (possibly) unpoisoning `temp` here, we should
2312 // emit a read barrier for the previous class reference load.
 2313  // However, this is not required in practice, as this is an
 2314  // intermediate/temporary reference and because the current
 2315  // concurrent copying collector keeps the from-space memory
 2316  // intact/accessible until the end of the marking phase (future
 2317  // concurrent copying collectors may not).
Roland Levillain4d027112015-07-01 15:41:14 +01002318 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002319 // temp = temp->GetAddressOfIMT()
2320 __ movl(temp,
2321 Address(temp, mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002322 // temp = temp->GetImtEntryAt(method_offset);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002323 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00002324 invoke->GetImtIndex(), kX86PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002325 __ movl(temp, Address(temp, method_offset));
2326 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002327 __ call(Address(temp,
Andreas Gampe542451c2016-07-26 09:02:02 -07002328 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002329
2330 DCHECK(!codegen_->IsLeafMethod());
2331 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2332}
2333
Orion Hodsonac141392017-01-13 11:53:47 +00002334void LocationsBuilderX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2335 HandleInvoke(invoke);
2336}
2337
2338void InstructionCodeGeneratorX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2339 codegen_->GenerateInvokePolymorphicCall(invoke);
2340}
2341
Orion Hodson4c8e12e2018-05-18 08:33:20 +01002342void LocationsBuilderX86::VisitInvokeCustom(HInvokeCustom* invoke) {
2343 HandleInvoke(invoke);
2344}
2345
2346void InstructionCodeGeneratorX86::VisitInvokeCustom(HInvokeCustom* invoke) {
2347 codegen_->GenerateInvokeCustomCall(invoke);
2348}
2349
Roland Levillain88cb1752014-10-20 16:36:47 +01002350void LocationsBuilderX86::VisitNeg(HNeg* neg) {
2351 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002352 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Roland Levillain88cb1752014-10-20 16:36:47 +01002353 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002354 case DataType::Type::kInt32:
2355 case DataType::Type::kInt64:
Roland Levillain88cb1752014-10-20 16:36:47 +01002356 locations->SetInAt(0, Location::RequiresRegister());
2357 locations->SetOut(Location::SameAsFirstInput());
2358 break;
2359
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002360 case DataType::Type::kFloat32:
Roland Levillain5368c212014-11-27 15:03:41 +00002361 locations->SetInAt(0, Location::RequiresFpuRegister());
2362 locations->SetOut(Location::SameAsFirstInput());
2363 locations->AddTemp(Location::RequiresRegister());
2364 locations->AddTemp(Location::RequiresFpuRegister());
2365 break;
2366
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002367 case DataType::Type::kFloat64:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002368 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002369 locations->SetOut(Location::SameAsFirstInput());
2370 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002371 break;
2372
2373 default:
2374 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2375 }
2376}
2377
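// Negation sketch: 32-bit values use NEGL in place; 64-bit values negate the low and
// high halves with a carry fix-up (see the comment below); float/double values flip
// the sign bit by XOR-ing with a 0x80000000 / 0x8000000000000000 mask held in an XMM
// temporary.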
2378void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
2379 LocationSummary* locations = neg->GetLocations();
2380 Location out = locations->Out();
2381 Location in = locations->InAt(0);
2382 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002383 case DataType::Type::kInt32:
Roland Levillain88cb1752014-10-20 16:36:47 +01002384 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002385 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002386 __ negl(out.AsRegister<Register>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002387 break;
2388
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002389 case DataType::Type::kInt64:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002390 DCHECK(in.IsRegisterPair());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002391 DCHECK(in.Equals(out));
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002392 __ negl(out.AsRegisterPairLow<Register>());
 2393      // Negation is similar to subtraction from zero. The low 32 bits
 2394      // trigger a borrow when they are different from zero; to take it
 2395      // into account, add 1 to the high 32 bits if the carry flag (CF)
 2396      // is set to 1 after the first NEGL operation, before negating
 2397      // the high 32 bits as well.
2398 __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
2399 __ negl(out.AsRegisterPairHigh<Register>());
2400 break;
2401
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002402 case DataType::Type::kFloat32: {
Roland Levillain5368c212014-11-27 15:03:41 +00002403 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002404 Register constant = locations->GetTemp(0).AsRegister<Register>();
2405 XmmRegister mask = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002406 // Implement float negation with an exclusive or with value
2407 // 0x80000000 (mask for bit 31, representing the sign of a
2408 // single-precision floating-point number).
2409 __ movl(constant, Immediate(INT32_C(0x80000000)));
2410 __ movd(mask, constant);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002411 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002412 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002413 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002414
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002415 case DataType::Type::kFloat64: {
Roland Levillain5368c212014-11-27 15:03:41 +00002416 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002417 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002418 // Implement double negation with an exclusive or with value
2419 // 0x8000000000000000 (mask for bit 63, representing the sign of
2420 // a double-precision floating-point number).
2421 __ LoadLongConstant(mask, INT64_C(0x8000000000000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002422 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002423 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002424 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002425
2426 default:
2427 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2428 }
2429}
2430
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002431void LocationsBuilderX86::VisitX86FPNeg(HX86FPNeg* neg) {
2432 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002433 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002434 DCHECK(DataType::IsFloatingPointType(neg->GetType()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002435 locations->SetInAt(0, Location::RequiresFpuRegister());
2436 locations->SetInAt(1, Location::RequiresRegister());
2437 locations->SetOut(Location::SameAsFirstInput());
2438 locations->AddTemp(Location::RequiresFpuRegister());
2439}
2440
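// Same sign-bit flip as the floating-point cases of VisitNeg above, except that the
// mask is loaded from the constant area via the method address base register passed
// as the second input.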
2441void InstructionCodeGeneratorX86::VisitX86FPNeg(HX86FPNeg* neg) {
2442 LocationSummary* locations = neg->GetLocations();
2443 Location out = locations->Out();
2444 DCHECK(locations->InAt(0).Equals(out));
2445
2446 Register constant_area = locations->InAt(1).AsRegister<Register>();
2447 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002448 if (neg->GetType() == DataType::Type::kFloat32) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00002449 __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x80000000),
2450 neg->GetBaseMethodAddress(),
2451 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002452 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
2453 } else {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00002454 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000),
2455 neg->GetBaseMethodAddress(),
2456 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002457 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
2458 }
2459}
2460
Roland Levillaindff1f282014-11-05 14:15:05 +00002461void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002462 DataType::Type result_type = conversion->GetResultType();
2463 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002464 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2465 << input_type << " -> " << result_type;
Roland Levillain624279f2014-12-04 11:54:28 +00002466
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002467 // The float-to-long and double-to-long type conversions rely on a
2468 // call to the runtime.
Roland Levillain624279f2014-12-04 11:54:28 +00002469 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002470 ((input_type == DataType::Type::kFloat32 || input_type == DataType::Type::kFloat64)
2471 && result_type == DataType::Type::kInt64)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01002472 ? LocationSummary::kCallOnMainOnly
Roland Levillain624279f2014-12-04 11:54:28 +00002473 : LocationSummary::kNoCall;
2474 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002475 new (GetGraph()->GetAllocator()) LocationSummary(conversion, call_kind);
Roland Levillain624279f2014-12-04 11:54:28 +00002476
Roland Levillaindff1f282014-11-05 14:15:05 +00002477 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002478 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002479 case DataType::Type::kInt8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002480 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002481 case DataType::Type::kUint8:
2482 case DataType::Type::kInt8:
2483 case DataType::Type::kUint16:
2484 case DataType::Type::kInt16:
2485 case DataType::Type::kInt32:
2486 locations->SetInAt(0, Location::ByteRegisterOrConstant(ECX, conversion->InputAt(0)));
2487 // Make the output overlap to please the register allocator. This greatly simplifies
 2488      // the validation of the linear scan implementation.
2489 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2490 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002491 case DataType::Type::kInt64: {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002492 HInstruction* input = conversion->InputAt(0);
2493 Location input_location = input->IsConstant()
2494 ? Location::ConstantLocation(input->AsConstant())
2495 : Location::RegisterPairLocation(EAX, EDX);
2496 locations->SetInAt(0, input_location);
2497 // Make the output overlap to please the register allocator. This greatly simplifies
 2498      // the validation of the linear scan implementation.
2499 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2500 break;
2501 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00002502
2503 default:
2504 LOG(FATAL) << "Unexpected type conversion from " << input_type
2505 << " to " << result_type;
2506 }
2507 break;
2508
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002509 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002510 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002511 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
2512 locations->SetInAt(0, Location::Any());
2513 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Roland Levillain01a8d712014-11-14 16:27:39 +00002514 break;
2515
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002516 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002517 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002518 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002519 locations->SetInAt(0, Location::Any());
2520 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2521 break;
2522
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002523 case DataType::Type::kFloat32:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002524 locations->SetInAt(0, Location::RequiresFpuRegister());
2525 locations->SetOut(Location::RequiresRegister());
2526 locations->AddTemp(Location::RequiresFpuRegister());
2527 break;
2528
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002529 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002530 locations->SetInAt(0, Location::RequiresFpuRegister());
2531 locations->SetOut(Location::RequiresRegister());
2532 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002533 break;
2534
2535 default:
2536 LOG(FATAL) << "Unexpected type conversion from " << input_type
2537 << " to " << result_type;
2538 }
2539 break;
2540
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002541 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002542 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002543 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002544 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002545 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002546 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002547 case DataType::Type::kInt16:
2548 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002549 locations->SetInAt(0, Location::RegisterLocation(EAX));
2550 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
2551 break;
2552
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002553 case DataType::Type::kFloat32:
2554 case DataType::Type::kFloat64: {
Vladimir Marko949c91f2015-01-27 10:48:44 +00002555 InvokeRuntimeCallingConvention calling_convention;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002556 XmmRegister parameter = calling_convention.GetFpuRegisterAt(0);
2557 locations->SetInAt(0, Location::FpuRegisterLocation(parameter));
2558
Vladimir Marko949c91f2015-01-27 10:48:44 +00002559 // The runtime helper puts the result in EAX, EDX.
2560 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Vladimir Marko949c91f2015-01-27 10:48:44 +00002561 }
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002562 break;
Roland Levillaindff1f282014-11-05 14:15:05 +00002563
2564 default:
2565 LOG(FATAL) << "Unexpected type conversion from " << input_type
2566 << " to " << result_type;
2567 }
2568 break;
2569
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002570 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002571 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002572 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002573 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002574 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002575 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002576 case DataType::Type::kInt16:
2577 case DataType::Type::kInt32:
Roland Levillaincff13742014-11-17 14:32:17 +00002578 locations->SetInAt(0, Location::RequiresRegister());
2579 locations->SetOut(Location::RequiresFpuRegister());
2580 break;
2581
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002582 case DataType::Type::kInt64:
Roland Levillain232ade02015-04-20 15:14:36 +01002583 locations->SetInAt(0, Location::Any());
2584 locations->SetOut(Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002585 break;
2586
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002587 case DataType::Type::kFloat64:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002588 locations->SetInAt(0, Location::RequiresFpuRegister());
2589 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002590 break;
2591
2592 default:
2593 LOG(FATAL) << "Unexpected type conversion from " << input_type
2594 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002595 }
Roland Levillaincff13742014-11-17 14:32:17 +00002596 break;
2597
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002598 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002599 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002600 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002601 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002602 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002603 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002604 case DataType::Type::kInt16:
2605 case DataType::Type::kInt32:
Roland Levillaincff13742014-11-17 14:32:17 +00002606 locations->SetInAt(0, Location::RequiresRegister());
2607 locations->SetOut(Location::RequiresFpuRegister());
2608 break;
2609
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002610 case DataType::Type::kInt64:
Roland Levillain232ade02015-04-20 15:14:36 +01002611 locations->SetInAt(0, Location::Any());
2612 locations->SetOut(Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002613 break;
2614
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002615 case DataType::Type::kFloat32:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002616 locations->SetInAt(0, Location::RequiresFpuRegister());
2617 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002618 break;
2619
2620 default:
2621 LOG(FATAL) << "Unexpected type conversion from " << input_type
2622 << " to " << result_type;
2623 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002624 break;
2625
2626 default:
2627 LOG(FATAL) << "Unexpected type conversion from " << input_type
2628 << " to " << result_type;
2629 }
2630}
2631
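// Highlights of the conversions below: float/double to int saturates positive
// overflow to kPrimIntMax and maps NaN to 0 explicitly; float/double to long is done
// through the kQuickF2l/kQuickD2l runtime entry points; long to float/double goes
// through the x87 FP stack (PushOntoFPStack followed by fstps/fstpl).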
2632void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
2633 LocationSummary* locations = conversion->GetLocations();
2634 Location out = locations->Out();
2635 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002636 DataType::Type result_type = conversion->GetResultType();
2637 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002638 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2639 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002640 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002641 case DataType::Type::kUint8:
2642 switch (input_type) {
2643 case DataType::Type::kInt8:
2644 case DataType::Type::kUint16:
2645 case DataType::Type::kInt16:
2646 case DataType::Type::kInt32:
2647 if (in.IsRegister()) {
2648 __ movzxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
2649 } else {
2650 DCHECK(in.GetConstant()->IsIntConstant());
2651 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
2652 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint8_t>(value)));
2653 }
2654 break;
2655 case DataType::Type::kInt64:
2656 if (in.IsRegisterPair()) {
2657 __ movzxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2658 } else {
2659 DCHECK(in.GetConstant()->IsLongConstant());
2660 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2661 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint8_t>(value)));
2662 }
2663 break;
2664
2665 default:
2666 LOG(FATAL) << "Unexpected type conversion from " << input_type
2667 << " to " << result_type;
2668 }
2669 break;
2670
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002671 case DataType::Type::kInt8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002672 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002673 case DataType::Type::kUint8:
2674 case DataType::Type::kUint16:
2675 case DataType::Type::kInt16:
2676 case DataType::Type::kInt32:
2677 if (in.IsRegister()) {
2678 __ movsxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
2679 } else {
2680 DCHECK(in.GetConstant()->IsIntConstant());
2681 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
2682 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2683 }
2684 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002685 case DataType::Type::kInt64:
Vladimir Markob52bbde2016-02-12 12:06:05 +00002686 if (in.IsRegisterPair()) {
2687 __ movsxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2688 } else {
2689 DCHECK(in.GetConstant()->IsLongConstant());
2690 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2691 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2692 }
2693 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002694
2695 default:
2696 LOG(FATAL) << "Unexpected type conversion from " << input_type
2697 << " to " << result_type;
2698 }
2699 break;
2700
2701 case DataType::Type::kUint16:
2702 switch (input_type) {
2703 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002704 case DataType::Type::kInt16:
2705 case DataType::Type::kInt32:
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002706 if (in.IsRegister()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002707 __ movzxw(out.AsRegister<Register>(), in.AsRegister<Register>());
2708 } else if (in.IsStackSlot()) {
2709 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002710 } else {
2711 DCHECK(in.GetConstant()->IsIntConstant());
2712 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002713 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
2714 }
2715 break;
2716 case DataType::Type::kInt64:
2717 if (in.IsRegisterPair()) {
2718 __ movzxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2719 } else if (in.IsDoubleStackSlot()) {
2720 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2721 } else {
2722 DCHECK(in.GetConstant()->IsLongConstant());
2723 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2724 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002725 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00002726 break;
2727
2728 default:
2729 LOG(FATAL) << "Unexpected type conversion from " << input_type
2730 << " to " << result_type;
2731 }
2732 break;
2733
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002734 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002735 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002736 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002737 case DataType::Type::kInt32:
Roland Levillain01a8d712014-11-14 16:27:39 +00002738 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002739 __ movsxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain01a8d712014-11-14 16:27:39 +00002740 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002741 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain01a8d712014-11-14 16:27:39 +00002742 } else {
2743 DCHECK(in.GetConstant()->IsIntConstant());
2744 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002745 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
Roland Levillain01a8d712014-11-14 16:27:39 +00002746 }
2747 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002748 case DataType::Type::kInt64:
2749 if (in.IsRegisterPair()) {
2750 __ movsxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2751 } else if (in.IsDoubleStackSlot()) {
2752 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2753 } else {
2754 DCHECK(in.GetConstant()->IsLongConstant());
2755 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2756 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
2757 }
2758 break;
Roland Levillain01a8d712014-11-14 16:27:39 +00002759
2760 default:
2761 LOG(FATAL) << "Unexpected type conversion from " << input_type
2762 << " to " << result_type;
2763 }
2764 break;
2765
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002766 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002767 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002768 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002769 if (in.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002770 __ movl(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
Roland Levillain946e1432014-11-11 17:35:19 +00002771 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002772 __ movl(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain946e1432014-11-11 17:35:19 +00002773 } else {
2774 DCHECK(in.IsConstant());
2775 DCHECK(in.GetConstant()->IsLongConstant());
2776 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002777 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002778 }
2779 break;
2780
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002781 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002782 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2783 Register output = out.AsRegister<Register>();
2784 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002785 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002786
2787 __ movl(output, Immediate(kPrimIntMax));
2788 // temp = int-to-float(output)
2789 __ cvtsi2ss(temp, output);
2790 // if input >= temp goto done
2791 __ comiss(input, temp);
2792 __ j(kAboveEqual, &done);
2793 // if input == NaN goto nan
2794 __ j(kUnordered, &nan);
2795 // output = float-to-int-truncate(input)
2796 __ cvttss2si(output, input);
2797 __ jmp(&done);
2798 __ Bind(&nan);
2799 // output = 0
2800 __ xorl(output, output);
2801 __ Bind(&done);
2802 break;
2803 }
2804
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002805 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002806 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2807 Register output = out.AsRegister<Register>();
2808 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002809 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002810
2811 __ movl(output, Immediate(kPrimIntMax));
2812 // temp = int-to-double(output)
2813 __ cvtsi2sd(temp, output);
2814 // if input >= temp goto done
2815 __ comisd(input, temp);
2816 __ j(kAboveEqual, &done);
2817 // if input == NaN goto nan
2818 __ j(kUnordered, &nan);
2819 // output = double-to-int-truncate(input)
2820 __ cvttsd2si(output, input);
2821 __ jmp(&done);
2822 __ Bind(&nan);
2823 // output = 0
2824 __ xorl(output, output);
2825 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002826 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002827 }
Roland Levillain946e1432014-11-11 17:35:19 +00002828
2829 default:
2830 LOG(FATAL) << "Unexpected type conversion from " << input_type
2831 << " to " << result_type;
2832 }
2833 break;
2834
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002835 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002836 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002837 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002838 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002839 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002840 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002841 case DataType::Type::kInt16:
2842 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002843 DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
2844 DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002845 DCHECK_EQ(in.AsRegister<Register>(), EAX);
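          // cdq sign-extends EAX into EDX:EAX, which is why the locations fix the
          // input to EAX and the output to the EAX/EDX register pair.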
Roland Levillaindff1f282014-11-05 14:15:05 +00002846 __ cdq();
2847 break;
2848
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002849 case DataType::Type::kFloat32:
Serban Constantinescuba45db02016-07-12 22:53:02 +01002850 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002851 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
Roland Levillain624279f2014-12-04 11:54:28 +00002852 break;
2853
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002854 case DataType::Type::kFloat64:
Serban Constantinescuba45db02016-07-12 22:53:02 +01002855 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002856 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
Roland Levillaindff1f282014-11-05 14:15:05 +00002857 break;
2858
2859 default:
2860 LOG(FATAL) << "Unexpected type conversion from " << input_type
2861 << " to " << result_type;
2862 }
2863 break;
2864
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002865 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002866 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002867 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002868 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002869 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002870 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002871 case DataType::Type::kInt16:
2872 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002873 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002874 break;
2875
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002876 case DataType::Type::kInt64: {
Roland Levillain232ade02015-04-20 15:14:36 +01002877 size_t adjustment = 0;
Roland Levillain6d0e4832014-11-27 18:31:21 +00002878
Roland Levillain232ade02015-04-20 15:14:36 +01002879 // Create stack space for the call to
2880 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstps below.
2881 // TODO: enhance register allocator to ask for stack temporaries.
2882 if (!in.IsDoubleStackSlot() || !out.IsStackSlot()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002883 adjustment = DataType::Size(DataType::Type::kInt64);
Roland Levillain232ade02015-04-20 15:14:36 +01002884 __ subl(ESP, Immediate(adjustment));
2885 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002886
Roland Levillain232ade02015-04-20 15:14:36 +01002887 // Load the value to the FP stack, using temporaries if needed.
2888 PushOntoFPStack(in, 0, adjustment, false, true);
2889
2890 if (out.IsStackSlot()) {
2891 __ fstps(Address(ESP, out.GetStackIndex() + adjustment));
2892 } else {
2893 __ fstps(Address(ESP, 0));
2894 Location stack_temp = Location::StackSlot(0);
2895 codegen_->Move32(out, stack_temp);
2896 }
2897
2898 // Remove the temporary stack space we allocated.
2899 if (adjustment != 0) {
2900 __ addl(ESP, Immediate(adjustment));
2901 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002902 break;
2903 }
2904
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002905 case DataType::Type::kFloat64:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002906 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002907 break;
2908
2909 default:
2910 LOG(FATAL) << "Unexpected type conversion from " << input_type
2911 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002912 }
Roland Levillaincff13742014-11-17 14:32:17 +00002913 break;
2914
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002915 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002916 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002917 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002918 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002919 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002920 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002921 case DataType::Type::kInt16:
2922 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002923 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002924 break;
2925
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002926 case DataType::Type::kInt64: {
Roland Levillain232ade02015-04-20 15:14:36 +01002927 size_t adjustment = 0;
Roland Levillain647b9ed2014-11-27 12:06:00 +00002928
Roland Levillain232ade02015-04-20 15:14:36 +01002929 // Create stack space for the call to
2930 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstpl below.
2931 // TODO: enhance register allocator to ask for stack temporaries.
2932 if (!in.IsDoubleStackSlot() || !out.IsDoubleStackSlot()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002933 adjustment = DataType::Size(DataType::Type::kInt64);
Roland Levillain232ade02015-04-20 15:14:36 +01002934 __ subl(ESP, Immediate(adjustment));
2935 }
2936
2937 // Load the value to the FP stack, using temporaries if needed.
2938 PushOntoFPStack(in, 0, adjustment, false, true);
2939
2940 if (out.IsDoubleStackSlot()) {
2941 __ fstpl(Address(ESP, out.GetStackIndex() + adjustment));
2942 } else {
2943 __ fstpl(Address(ESP, 0));
2944 Location stack_temp = Location::DoubleStackSlot(0);
2945 codegen_->Move64(out, stack_temp);
2946 }
2947
2948 // Remove the temporary stack space we allocated.
2949 if (adjustment != 0) {
2950 __ addl(ESP, Immediate(adjustment));
2951 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002952 break;
2953 }
2954
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002955 case DataType::Type::kFloat32:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002956 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002957 break;
2958
2959 default:
2960 LOG(FATAL) << "Unexpected type conversion from " << input_type
2961 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002962 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002963 break;
2964
2965 default:
2966 LOG(FATAL) << "Unexpected type conversion from " << input_type
2967 << " to " << result_type;
2968 }
2969}
2970
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002971void LocationsBuilderX86::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002972 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002973 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002974 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002975 case DataType::Type::kInt32: {
Mark Mendell09b84632015-02-13 17:48:38 -05002976 locations->SetInAt(0, Location::RequiresRegister());
2977 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2978 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2979 break;
2980 }
2981
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002982 case DataType::Type::kInt64: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002983 locations->SetInAt(0, Location::RequiresRegister());
2984 locations->SetInAt(1, Location::Any());
2985 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002986 break;
2987 }
2988
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002989 case DataType::Type::kFloat32:
2990 case DataType::Type::kFloat64: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002991 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002992 if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
2993 DCHECK(add->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00002994 } else if (add->InputAt(1)->IsConstant()) {
2995 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002996 } else {
2997 locations->SetInAt(1, Location::Any());
2998 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002999 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003000 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003001 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003002
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003003 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003004 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Elliott Hughesc1896c92018-11-29 11:33:18 -08003005 UNREACHABLE();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003006 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003007}
3008
3009void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
3010 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003011 Location first = locations->InAt(0);
3012 Location second = locations->InAt(1);
Mark Mendell09b84632015-02-13 17:48:38 -05003013 Location out = locations->Out();
3014
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003015 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003016 case DataType::Type::kInt32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003017 if (second.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05003018 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3019 __ addl(out.AsRegister<Register>(), second.AsRegister<Register>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003020 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3021 __ addl(out.AsRegister<Register>(), first.AsRegister<Register>());
Mark Mendell09b84632015-02-13 17:48:38 -05003022 } else {
3023 __ leal(out.AsRegister<Register>(), Address(
3024 first.AsRegister<Register>(), second.AsRegister<Register>(), TIMES_1, 0));
3025 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003026 } else if (second.IsConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05003027 int32_t value = second.GetConstant()->AsIntConstant()->GetValue();
3028 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3029 __ addl(out.AsRegister<Register>(), Immediate(value));
3030 } else {
3031 __ leal(out.AsRegister<Register>(), Address(first.AsRegister<Register>(), value));
3032 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003033 } else {
Mark Mendell09b84632015-02-13 17:48:38 -05003034 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003035 __ addl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003036 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003037 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003038 }
3039
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003040 case DataType::Type::kInt64: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003041 if (second.IsRegisterPair()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003042 __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3043 __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003044 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003045 __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3046 __ adcl(first.AsRegisterPairHigh<Register>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003047 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003048 } else {
3049 DCHECK(second.IsConstant()) << second;
3050 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3051 __ addl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3052 __ adcl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003053 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003054 break;
3055 }
3056
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003057 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003058 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003059 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003060 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3061 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003062 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003063 __ addss(first.AsFpuRegister<XmmRegister>(),
3064 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003065 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3066 const_area->GetBaseMethodAddress(),
3067 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003068 } else {
3069 DCHECK(second.IsStackSlot());
3070 __ addss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003071 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003072 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003073 }
3074
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003075 case DataType::Type::kFloat64: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003076 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003077 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003078 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3079 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003080 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003081 __ addsd(first.AsFpuRegister<XmmRegister>(),
3082 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003083 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3084 const_area->GetBaseMethodAddress(),
3085 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003086 } else {
3087 DCHECK(second.IsDoubleStackSlot());
3088 __ addsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003089 }
3090 break;
3091 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003092
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003093 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003094 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003095 }
3096}
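// Illustrative note: the leal forms above give a three-operand add, so the result can
// land in a register distinct from both inputs without clobbering either of them:
//   __ leal(out, Address(first, second, TIMES_1, 0));   // out = first + second
//   __ leal(out, Address(first, value));                // out = first + constant
// This is why the kInt32 locations above do not force Location::SameAsFirstInput().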
3097
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003098void LocationsBuilderX86::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003099 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003100 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003101 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003102 case DataType::Type::kInt32:
3103 case DataType::Type::kInt64: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003104 locations->SetInAt(0, Location::RequiresRegister());
3105 locations->SetInAt(1, Location::Any());
3106 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003107 break;
3108 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003109 case DataType::Type::kFloat32:
3110 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003111 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003112 if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3113 DCHECK(sub->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003114 } else if (sub->InputAt(1)->IsConstant()) {
3115 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003116 } else {
3117 locations->SetInAt(1, Location::Any());
3118 }
Calin Juravle11351682014-10-23 15:38:15 +01003119 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003120 break;
Calin Juravle11351682014-10-23 15:38:15 +01003121 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003122
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003123 default:
Calin Juravle11351682014-10-23 15:38:15 +01003124 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003125 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003126}
3127
3128void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
3129 LocationSummary* locations = sub->GetLocations();
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003130 Location first = locations->InAt(0);
3131 Location second = locations->InAt(1);
Calin Juravle11351682014-10-23 15:38:15 +01003132 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003133 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003134 case DataType::Type::kInt32: {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003135 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003136 __ subl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003137 } else if (second.IsConstant()) {
Roland Levillain199f3362014-11-27 17:15:16 +00003138 __ subl(first.AsRegister<Register>(),
3139 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003140 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003141 __ subl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003142 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003143 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003144 }
3145
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003146 case DataType::Type::kInt64: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003147 if (second.IsRegisterPair()) {
Calin Juravle11351682014-10-23 15:38:15 +01003148 __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3149 __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003150 } else if (second.IsDoubleStackSlot()) {
Calin Juravle11351682014-10-23 15:38:15 +01003151 __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003152 __ sbbl(first.AsRegisterPairHigh<Register>(),
3153 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003154 } else {
3155 DCHECK(second.IsConstant()) << second;
3156 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3157 __ subl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3158 __ sbbl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003159 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003160 break;
3161 }
3162
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003163 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003164 if (second.IsFpuRegister()) {
3165 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3166 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3167 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003168 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003169 __ subss(first.AsFpuRegister<XmmRegister>(),
3170 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003171 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3172 const_area->GetBaseMethodAddress(),
3173 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003174 } else {
3175 DCHECK(second.IsStackSlot());
3176 __ subss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3177 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003178 break;
Calin Juravle11351682014-10-23 15:38:15 +01003179 }
3180
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003181 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003182 if (second.IsFpuRegister()) {
3183 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3184 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3185 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003186 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003187 __ subsd(first.AsFpuRegister<XmmRegister>(),
3188 codegen_->LiteralDoubleAddress(
3189 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003190 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003191 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3192 } else {
3193 DCHECK(second.IsDoubleStackSlot());
3194 __ subsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3195 }
Calin Juravle11351682014-10-23 15:38:15 +01003196 break;
3197 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003198
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003199 default:
Calin Juravle11351682014-10-23 15:38:15 +01003200 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003201 }
3202}
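// Illustrative note: as in VisitAdd, the kInt64 case chains the two 32-bit halves
// through the borrow flag. For example, `first -= 0x123456789ABC` becomes roughly
//   __ subl(first_lo, Immediate(0x56789ABC));  // Low32Bits(value)
//   __ sbbl(first_hi, Immediate(0x1234));      // High32Bits(value) plus the borrow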
3203
Calin Juravle34bacdf2014-10-07 20:23:36 +01003204void LocationsBuilderX86::VisitMul(HMul* mul) {
3205 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003206 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003207 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003208 case DataType::Type::kInt32:
Calin Juravle34bacdf2014-10-07 20:23:36 +01003209 locations->SetInAt(0, Location::RequiresRegister());
3210 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003211 if (mul->InputAt(1)->IsIntConstant()) {
3212 // Can use 3 operand multiply.
3213 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3214 } else {
3215 locations->SetOut(Location::SameAsFirstInput());
3216 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003217 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003218 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003219 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003220 locations->SetInAt(1, Location::Any());
3221 locations->SetOut(Location::SameAsFirstInput());
3222        // Needed for imul on 32 bits with a 64-bit output.
3223 locations->AddTemp(Location::RegisterLocation(EAX));
3224 locations->AddTemp(Location::RegisterLocation(EDX));
3225 break;
3226 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003227 case DataType::Type::kFloat32:
3228 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003229 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003230 if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3231 DCHECK(mul->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003232 } else if (mul->InputAt(1)->IsConstant()) {
3233 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003234 } else {
3235 locations->SetInAt(1, Location::Any());
3236 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003237 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003238 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003239 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003240
3241 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003242 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003243 }
3244}
3245
3246void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
3247 LocationSummary* locations = mul->GetLocations();
3248 Location first = locations->InAt(0);
3249 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003250 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003251
3252 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003253 case DataType::Type::kInt32:
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003254 // The constant may have ended up in a register, so test explicitly to avoid
3255 // problems where the output may not be the same as the first operand.
3256 if (mul->InputAt(1)->IsIntConstant()) {
3257 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3258 __ imull(out.AsRegister<Register>(), first.AsRegister<Register>(), imm);
3259 } else if (second.IsRegister()) {
3260 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003261 __ imull(first.AsRegister<Register>(), second.AsRegister<Register>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003262 } else {
3263 DCHECK(second.IsStackSlot());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003264 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003265 __ imull(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003266 }
3267 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003268
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003269 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003270 Register in1_hi = first.AsRegisterPairHigh<Register>();
3271 Register in1_lo = first.AsRegisterPairLow<Register>();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003272 Register eax = locations->GetTemp(0).AsRegister<Register>();
3273 Register edx = locations->GetTemp(1).AsRegister<Register>();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003274
3275 DCHECK_EQ(EAX, eax);
3276 DCHECK_EQ(EDX, edx);
3277
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003278 // input: in1 - 64 bits, in2 - 64 bits.
Calin Juravle34bacdf2014-10-07 20:23:36 +01003279 // output: in1
3280 // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
3281 // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
3282 // parts: in1.lo = (in1.lo * in2.lo)[31:0]
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003283 if (second.IsConstant()) {
3284 DCHECK(second.GetConstant()->IsLongConstant());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003285
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003286 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3287 int32_t low_value = Low32Bits(value);
3288 int32_t high_value = High32Bits(value);
3289 Immediate low(low_value);
3290 Immediate high(high_value);
3291
3292 __ movl(eax, high);
3293 // eax <- in1.lo * in2.hi
3294 __ imull(eax, in1_lo);
3295 // in1.hi <- in1.hi * in2.lo
3296 __ imull(in1_hi, low);
3297 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3298 __ addl(in1_hi, eax);
3299 // move in2_lo to eax to prepare for double precision
3300 __ movl(eax, low);
3301 // edx:eax <- in1.lo * in2.lo
3302 __ mull(in1_lo);
3303 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3304 __ addl(in1_hi, edx);
3305 // in1.lo <- (in1.lo * in2.lo)[31:0];
3306 __ movl(in1_lo, eax);
3307 } else if (second.IsRegisterPair()) {
3308 Register in2_hi = second.AsRegisterPairHigh<Register>();
3309 Register in2_lo = second.AsRegisterPairLow<Register>();
3310
3311 __ movl(eax, in2_hi);
3312 // eax <- in1.lo * in2.hi
3313 __ imull(eax, in1_lo);
3314 // in1.hi <- in1.hi * in2.lo
3315 __ imull(in1_hi, in2_lo);
3316 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3317 __ addl(in1_hi, eax);
3318 // move in1_lo to eax to prepare for double precision
3319 __ movl(eax, in1_lo);
3320 // edx:eax <- in1.lo * in2.lo
3321 __ mull(in2_lo);
3322 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3323 __ addl(in1_hi, edx);
3324 // in1.lo <- (in1.lo * in2.lo)[31:0];
3325 __ movl(in1_lo, eax);
3326 } else {
3327 DCHECK(second.IsDoubleStackSlot()) << second;
3328 Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
3329 Address in2_lo(ESP, second.GetStackIndex());
3330
3331 __ movl(eax, in2_hi);
3332 // eax <- in1.lo * in2.hi
3333 __ imull(eax, in1_lo);
3334 // in1.hi <- in1.hi * in2.lo
3335 __ imull(in1_hi, in2_lo);
3336 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3337 __ addl(in1_hi, eax);
3338 // move in1_lo to eax to prepare for double precision
3339 __ movl(eax, in1_lo);
3340 // edx:eax <- in1.lo * in2.lo
3341 __ mull(in2_lo);
3342 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3343 __ addl(in1_hi, edx);
3344 // in1.lo <- (in1.lo * in2.lo)[31:0];
3345 __ movl(in1_lo, eax);
3346 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003347
3348 break;
3349 }
3350
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003351 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003352 DCHECK(first.Equals(locations->Out()));
3353 if (second.IsFpuRegister()) {
3354 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3355 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3356 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003357 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003358 __ mulss(first.AsFpuRegister<XmmRegister>(),
3359 codegen_->LiteralFloatAddress(
3360 const_area->GetConstant()->AsFloatConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003361 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003362 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3363 } else {
3364 DCHECK(second.IsStackSlot());
3365 __ mulss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3366 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003367 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003368 }
3369
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003370 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003371 DCHECK(first.Equals(locations->Out()));
3372 if (second.IsFpuRegister()) {
3373 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3374 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3375 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003376 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003377 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3378 codegen_->LiteralDoubleAddress(
3379 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003380 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003381 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3382 } else {
3383 DCHECK(second.IsDoubleStackSlot());
3384 __ mulsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3385 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003386 break;
3387 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003388
3389 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003390 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003391 }
3392}
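// Illustrative note: the kInt64 case implements the formula in the comments above,
//   (a_hi:a_lo) * (b_hi:b_lo) mod 2^64
//       = ((a_lo * b_hi + a_hi * b_lo) << 32) + a_lo * b_lo,
// where the a_hi * b_hi term vanishes mod 2^64. The two cross terms only affect the
// high word, so plain 32-bit imull is enough for them, while the single widening mull
// of the low words supplies the low word plus the carry (in EDX) into the high word.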
3393
Roland Levillain232ade02015-04-20 15:14:36 +01003394void InstructionCodeGeneratorX86::PushOntoFPStack(Location source,
3395 uint32_t temp_offset,
3396 uint32_t stack_adjustment,
3397 bool is_fp,
3398 bool is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003399 if (source.IsStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003400 DCHECK(!is_wide);
3401 if (is_fp) {
3402 __ flds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3403 } else {
3404 __ filds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3405 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003406 } else if (source.IsDoubleStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003407 DCHECK(is_wide);
3408 if (is_fp) {
3409 __ fldl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3410 } else {
3411 __ fildl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3412 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003413 } else {
3414 // Write the value to the temporary location on the stack and load to FP stack.
Roland Levillain232ade02015-04-20 15:14:36 +01003415 if (!is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003416 Location stack_temp = Location::StackSlot(temp_offset);
3417 codegen_->Move32(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003418 if (is_fp) {
3419 __ flds(Address(ESP, temp_offset));
3420 } else {
3421 __ filds(Address(ESP, temp_offset));
3422 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003423 } else {
3424 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3425 codegen_->Move64(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003426 if (is_fp) {
3427 __ fldl(Address(ESP, temp_offset));
3428 } else {
3429 __ fildl(Address(ESP, temp_offset));
3430 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003431 }
3432 }
3433}
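// Illustrative usage note: callers select the fild/fld flavour via is_fp/is_wide.
// GenerateRemFP below pushes both operands as FP values, e.g.
//   PushOntoFPStack(second, elem_size, 2 * elem_size, /* is_fp= */ true, is_wide);
// while the kInt64 -> kFloat64 conversion above passes is_fp == false so that the
// filds/fildl load itself performs the integer-to-FP conversion.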
3434
3435void InstructionCodeGeneratorX86::GenerateRemFP(HRem *rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003436 DataType::Type type = rem->GetResultType();
3437 bool is_float = type == DataType::Type::kFloat32;
3438 size_t elem_size = DataType::Size(type);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003439 LocationSummary* locations = rem->GetLocations();
3440 Location first = locations->InAt(0);
3441 Location second = locations->InAt(1);
3442 Location out = locations->Out();
3443
3444 // Create stack space for 2 elements.
3445 // TODO: enhance register allocator to ask for stack temporaries.
3446 __ subl(ESP, Immediate(2 * elem_size));
3447
3448 // Load the values to the FP stack in reverse order, using temporaries if needed.
Roland Levillain232ade02015-04-20 15:14:36 +01003449 const bool is_wide = !is_float;
Andreas Gampe3db70682018-12-26 15:12:03 -08003450 PushOntoFPStack(second, elem_size, 2 * elem_size, /* is_fp= */ true, is_wide);
3451 PushOntoFPStack(first, 0, 2 * elem_size, /* is_fp= */ true, is_wide);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003452
3453 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003454 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003455 __ Bind(&retry);
3456 __ fprem();
3457
3458 // Move FP status to AX.
3459 __ fstsw();
3460
3461 // And see if the argument reduction is complete. This is signaled by the
3462 // C2 FPU flag bit set to 0.
3463 __ andl(EAX, Immediate(kC2ConditionMask));
3464 __ j(kNotEqual, &retry);
3465
3466 // We have settled on the final value. Retrieve it into an XMM register.
3467 // Store FP top of stack to real stack.
3468 if (is_float) {
3469 __ fsts(Address(ESP, 0));
3470 } else {
3471 __ fstl(Address(ESP, 0));
3472 }
3473
3474 // Pop the 2 items from the FP stack.
3475 __ fucompp();
3476
3477 // Load the value from the stack into an XMM register.
3478 DCHECK(out.IsFpuRegister()) << out;
3479 if (is_float) {
3480 __ movss(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3481 } else {
3482 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3483 }
3484
3485 // And remove the temporary stack space we allocated.
3486 __ addl(ESP, Immediate(2 * elem_size));
3487}
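// Illustrative note: fprem only produces a partial remainder (it reduces the exponent
// difference by at most 63 bits per round), so the loop above retries until the FPU
// clears C2. At that point ST(0) holds the remainder with the sign of the dividend,
// which matches the Java semantics of % for float and double.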
3488
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003489
3490void InstructionCodeGeneratorX86::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3491 DCHECK(instruction->IsDiv() || instruction->IsRem());
3492
3493 LocationSummary* locations = instruction->GetLocations();
3494 DCHECK(locations->InAt(1).IsConstant());
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003495 DCHECK(locations->InAt(1).GetConstant()->IsIntConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003496
3497 Register out_register = locations->Out().AsRegister<Register>();
3498 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003499 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003500
3501 DCHECK(imm == 1 || imm == -1);
3502
3503 if (instruction->IsRem()) {
3504 __ xorl(out_register, out_register);
3505 } else {
3506 __ movl(out_register, input_register);
3507 if (imm == -1) {
3508 __ negl(out_register);
3509 }
3510 }
3511}
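// Illustrative note: x % 1 and x % -1 are always 0 (hence the xorl), and x / -1 is a
// plain negation. Using negl also yields the Java-mandated Integer.MIN_VALUE for
// Integer.MIN_VALUE / -1, a case where an actual idivl would fault (see
// GenerateDivRemIntegral below).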
3512
Shalini Salomi Bodapatia66784b2018-11-06 13:05:44 +05303513void InstructionCodeGeneratorX86::RemByPowerOfTwo(HRem* instruction) {
3514 LocationSummary* locations = instruction->GetLocations();
3515 Location second = locations->InAt(1);
3516
3517 Register out = locations->Out().AsRegister<Register>();
3518 Register numerator = locations->InAt(0).AsRegister<Register>();
3519
3520 int32_t imm = Int64FromConstant(second.GetConstant());
3521 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3522 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
3523
3524 Register tmp = locations->GetTemp(0).AsRegister<Register>();
3525 NearLabel done;
3526 __ movl(out, numerator);
3527      __ andl(out, Immediate(abs_imm - 1));
3528      __ j(Condition::kZero, &done);
3529      __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm - 1))));
3530 __ testl(numerator, numerator);
3531 __ cmovl(Condition::kLess, out, tmp);
3532 __ Bind(&done);
3533}
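// Illustrative worked example (imm = 16): a non-negative numerator just keeps n & 15.
// For a negative numerator with a non-zero masked value, the leal forms (n & 15) - 16,
// giving a remainder with the sign of the dividend, e.g. n = -18:
//   (-18 & 15) = 14, 14 - 16 = -2, and indeed -18 % 16 == -2 in Java.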
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003534
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003535void InstructionCodeGeneratorX86::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003536 LocationSummary* locations = instruction->GetLocations();
3537
3538 Register out_register = locations->Out().AsRegister<Register>();
3539 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003540 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003541 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3542 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003543
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003544 Register num = locations->GetTemp(0).AsRegister<Register>();
3545
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003546 __ leal(num, Address(input_register, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003547 __ testl(input_register, input_register);
3548 __ cmovl(kGreaterEqual, num, input_register);
3549 int shift = CTZ(imm);
3550 __ sarl(num, Immediate(shift));
3551
3552 if (imm < 0) {
3553 __ negl(num);
3554 }
3555
3556 __ movl(out_register, num);
3557}
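// Illustrative worked example (imm = 8, shift = 3): the leal/cmovl pair computes
//   num = (n < 0) ? n + 7 : n
// so the arithmetic shift rounds towards zero instead of towards negative infinity:
//   n = -9:  (-9 + 7) >> 3 = -1, matching Java's -9 / 8 == -1,
// whereas a bare -9 >> 3 would give -2. The trailing negl handles negative divisors.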
3558
3559void InstructionCodeGeneratorX86::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3560 DCHECK(instruction->IsDiv() || instruction->IsRem());
3561
3562 LocationSummary* locations = instruction->GetLocations();
3563 int imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
3564
3565 Register eax = locations->InAt(0).AsRegister<Register>();
3566 Register out = locations->Out().AsRegister<Register>();
3567 Register num;
3568 Register edx;
3569
3570 if (instruction->IsDiv()) {
3571 edx = locations->GetTemp(0).AsRegister<Register>();
3572 num = locations->GetTemp(1).AsRegister<Register>();
3573 } else {
3574 edx = locations->Out().AsRegister<Register>();
3575 num = locations->GetTemp(0).AsRegister<Register>();
3576 }
3577
3578 DCHECK_EQ(EAX, eax);
3579 DCHECK_EQ(EDX, edx);
3580 if (instruction->IsDiv()) {
3581 DCHECK_EQ(EAX, out);
3582 } else {
3583 DCHECK_EQ(EDX, out);
3584 }
3585
3586 int64_t magic;
3587 int shift;
Andreas Gampe3db70682018-12-26 15:12:03 -08003588 CalculateMagicAndShiftForDivRem(imm, /* is_long= */ false, &magic, &shift);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003589
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003590 // Save the numerator.
3591 __ movl(num, eax);
3592
3593 // EAX = magic
3594 __ movl(eax, Immediate(magic));
3595
3596 // EDX:EAX = magic * numerator
3597 __ imull(num);
3598
3599 if (imm > 0 && magic < 0) {
3600 // EDX += num
3601 __ addl(edx, num);
3602 } else if (imm < 0 && magic > 0) {
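    // EDX -= num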
3603 __ subl(edx, num);
3604 }
3605
3606 // Shift if needed.
3607 if (shift != 0) {
3608 __ sarl(edx, Immediate(shift));
3609 }
3610
3611 // EDX += 1 if EDX < 0
3612 __ movl(eax, edx);
3613 __ shrl(edx, Immediate(31));
3614 __ addl(edx, eax);
3615
3616 if (instruction->IsRem()) {
3617 __ movl(eax, num);
3618 __ imull(edx, Immediate(imm));
3619 __ subl(eax, edx);
3620 __ movl(edx, eax);
3621 } else {
3622 __ movl(eax, edx);
3623 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003624}
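// Illustrative worked example (divisor imm = 7, for which the standard signed-division
// constants are magic = 0x92492493 and shift = 2): the sequence above computes
//   t = high32(magic * n) + n       // the +n corrects for magic being negative
//   t = t >> 2                      // arithmetic shift by `shift`
//   q = t + (t >>> 31)              // add the sign bit to round towards zero
// and, for a remainder, finishes with n - q * 7. E.g. n = -14: high32 = 5, t = -9,
// t >> 2 = -3, q = -3 + 1 = -2, remainder = -14 - (-2 * 7) = 0.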
3625
Calin Juravlebacfec32014-11-14 15:54:36 +00003626void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3627 DCHECK(instruction->IsDiv() || instruction->IsRem());
3628
3629 LocationSummary* locations = instruction->GetLocations();
3630 Location out = locations->Out();
3631 Location first = locations->InAt(0);
3632 Location second = locations->InAt(1);
3633 bool is_div = instruction->IsDiv();
3634
3635 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003636 case DataType::Type::kInt32: {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003637 DCHECK_EQ(EAX, first.AsRegister<Register>());
3638 DCHECK_EQ(is_div ? EAX : EDX, out.AsRegister<Register>());
Calin Juravlebacfec32014-11-14 15:54:36 +00003639
Vladimir Marko13c86fd2015-11-11 12:37:46 +00003640 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003641 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003642
3643 if (imm == 0) {
3644 // Do not generate anything for 0. DivZeroCheck would forbid any generated code.
3645 } else if (imm == 1 || imm == -1) {
3646 DivRemOneOrMinusOne(instruction);
Shalini Salomi Bodapatia66784b2018-11-06 13:05:44 +05303647 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
3648 if (is_div) {
3649 DivByPowerOfTwo(instruction->AsDiv());
3650 } else {
3651 RemByPowerOfTwo(instruction->AsRem());
3652 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003653 } else {
3654 DCHECK(imm <= -2 || imm >= 2);
3655 GenerateDivRemWithAnyConstant(instruction);
3656 }
3657 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01003658 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86(
David Srbecky9cd6d372016-02-09 15:24:47 +00003659 instruction, out.AsRegister<Register>(), is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003660 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003661
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003662 Register second_reg = second.AsRegister<Register>();
3663 // 0x80000000/-1 triggers an arithmetic exception!
3664        // Dividing by -1 is actually negation and -0x80000000 = 0x80000000 so
3665 // it's safe to just use negl instead of more complex comparisons.
Calin Juravlebacfec32014-11-14 15:54:36 +00003666
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003667 __ cmpl(second_reg, Immediate(-1));
3668 __ j(kEqual, slow_path->GetEntryLabel());
Calin Juravlebacfec32014-11-14 15:54:36 +00003669
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003670 // edx:eax <- sign-extended of eax
3671 __ cdq();
3672 // eax = quotient, edx = remainder
3673 __ idivl(second_reg);
3674 __ Bind(slow_path->GetExitLabel());
3675 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003676 break;
3677 }
3678
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003679 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003680 InvokeRuntimeCallingConvention calling_convention;
3681 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
3682 DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
3683 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
3684 DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
3685 DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
3686 DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());
3687
3688 if (is_div) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003689 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003690 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003691 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003692 codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003693 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003694 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003695 break;
3696 }
3697
3698 default:
3699 LOG(FATAL) << "Unexpected type for GenerateDivRemIntegral " << instruction->GetResultType();
3700 }
3701}
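// Illustrative note: the kInt32 non-constant path checks for a divisor of -1 because
// idivl faults (#DE) on INT_MIN / -1, whose quotient 2^31 is not representable, whereas
// Java defines Integer.MIN_VALUE / -1 == Integer.MIN_VALUE and Integer.MIN_VALUE % -1
// == 0; the DivRemMinusOneSlowPathX86 registered above produces exactly that without
// executing idivl. The kInt64 case has no hardware divide to use on x86-32 at all,
// hence the kQuickLdiv/kQuickLmod runtime calls.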
3702
Calin Juravle7c4954d2014-10-28 16:57:40 +00003703void LocationsBuilderX86::VisitDiv(HDiv* div) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003704 LocationSummary::CallKind call_kind = (div->GetResultType() == DataType::Type::kInt64)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003705 ? LocationSummary::kCallOnMainOnly
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003706 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01003707 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(div, call_kind);
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003708
Calin Juravle7c4954d2014-10-28 16:57:40 +00003709 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003710 case DataType::Type::kInt32: {
Calin Juravled0d48522014-11-04 16:40:20 +00003711 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003712 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003713 locations->SetOut(Location::SameAsFirstInput());
3714 // Intel uses edx:eax as the dividend.
3715 locations->AddTemp(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003716 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3717 // which enforces results to be in EAX and EDX, things are simpler if we use EAX also as
3718 // output and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003719 if (div->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003720 locations->AddTemp(Location::RequiresRegister());
3721 }
Calin Juravled0d48522014-11-04 16:40:20 +00003722 break;
3723 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003724 case DataType::Type::kInt64: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003725 InvokeRuntimeCallingConvention calling_convention;
3726 locations->SetInAt(0, Location::RegisterPairLocation(
3727 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3728 locations->SetInAt(1, Location::RegisterPairLocation(
3729 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3730 // Runtime helper puts the result in EAX, EDX.
3731 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Calin Juravle7c4954d2014-10-28 16:57:40 +00003732 break;
3733 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003734 case DataType::Type::kFloat32:
3735 case DataType::Type::kFloat64: {
Calin Juravle7c4954d2014-10-28 16:57:40 +00003736 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003737 if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3738 DCHECK(div->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003739 } else if (div->InputAt(1)->IsConstant()) {
3740 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003741 } else {
3742 locations->SetInAt(1, Location::Any());
3743 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003744 locations->SetOut(Location::SameAsFirstInput());
3745 break;
3746 }
3747
3748 default:
3749 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3750 }
3751}
3752
3753void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
3754 LocationSummary* locations = div->GetLocations();
3755 Location first = locations->InAt(0);
3756 Location second = locations->InAt(1);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003757
3758 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003759 case DataType::Type::kInt32:
3760 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003761 GenerateDivRemIntegral(div);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003762 break;
3763 }
3764
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003765 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003766 if (second.IsFpuRegister()) {
3767 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3768 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3769 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003770 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003771 __ divss(first.AsFpuRegister<XmmRegister>(),
3772 codegen_->LiteralFloatAddress(
3773 const_area->GetConstant()->AsFloatConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003774 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003775 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3776 } else {
3777 DCHECK(second.IsStackSlot());
3778 __ divss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3779 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003780 break;
3781 }
3782
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003783 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003784 if (second.IsFpuRegister()) {
3785 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3786 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3787 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003788 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003789 __ divsd(first.AsFpuRegister<XmmRegister>(),
3790 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003791 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3792 const_area->GetBaseMethodAddress(),
3793 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003794 } else {
3795 DCHECK(second.IsDoubleStackSlot());
3796 __ divsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3797 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003798 break;
3799 }
3800
3801 default:
3802 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3803 }
3804}
3805
Calin Juravlebacfec32014-11-14 15:54:36 +00003806void LocationsBuilderX86::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003807 DataType::Type type = rem->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003808
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003809 LocationSummary::CallKind call_kind = (rem->GetResultType() == DataType::Type::kInt64)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003810 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003811 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01003812 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Calin Juravlebacfec32014-11-14 15:54:36 +00003813
Calin Juravled2ec87d2014-12-08 14:24:46 +00003814 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003815 case DataType::Type::kInt32: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003816 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003817 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003818 locations->SetOut(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003819 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3820 // which enforces results to be in EAX and EDX, things are simpler if we use EDX also as
3821 // output and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003822 if (rem->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003823 locations->AddTemp(Location::RequiresRegister());
3824 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003825 break;
3826 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003827 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003828 InvokeRuntimeCallingConvention calling_convention;
3829 locations->SetInAt(0, Location::RegisterPairLocation(
3830 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3831 locations->SetInAt(1, Location::RegisterPairLocation(
3832 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3833 // Runtime helper puts the result in EAX, EDX.
3834 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
3835 break;
3836 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003837 case DataType::Type::kFloat64:
3838 case DataType::Type::kFloat32: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003839 locations->SetInAt(0, Location::Any());
3840 locations->SetInAt(1, Location::Any());
3841 locations->SetOut(Location::RequiresFpuRegister());
3842 locations->AddTemp(Location::RegisterLocation(EAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003843 break;
3844 }
3845
3846 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003847 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003848 }
3849}
3850
3851void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003852 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00003853 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003854 case DataType::Type::kInt32:
3855 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003856 GenerateDivRemIntegral(rem);
3857 break;
3858 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003859 case DataType::Type::kFloat32:
3860 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003861 GenerateRemFP(rem);
Calin Juravlebacfec32014-11-14 15:54:36 +00003862 break;
3863 }
3864 default:
3865 LOG(FATAL) << "Unexpected rem type " << type;
3866 }
3867}
3868
Aart Bik1f8d51b2018-02-15 10:42:37 -08003869static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
3870 LocationSummary* locations = new (allocator) LocationSummary(minmax);
3871 switch (minmax->GetResultType()) {
3872 case DataType::Type::kInt32:
3873 locations->SetInAt(0, Location::RequiresRegister());
3874 locations->SetInAt(1, Location::RequiresRegister());
3875 locations->SetOut(Location::SameAsFirstInput());
3876 break;
3877 case DataType::Type::kInt64:
3878 locations->SetInAt(0, Location::RequiresRegister());
3879 locations->SetInAt(1, Location::RequiresRegister());
3880 locations->SetOut(Location::SameAsFirstInput());
3881 // Register to use to perform a long subtract to set cc.
3882 locations->AddTemp(Location::RequiresRegister());
3883 break;
3884 case DataType::Type::kFloat32:
3885 locations->SetInAt(0, Location::RequiresFpuRegister());
3886 locations->SetInAt(1, Location::RequiresFpuRegister());
3887 locations->SetOut(Location::SameAsFirstInput());
3888 locations->AddTemp(Location::RequiresRegister());
3889 break;
3890 case DataType::Type::kFloat64:
3891 locations->SetInAt(0, Location::RequiresFpuRegister());
3892 locations->SetInAt(1, Location::RequiresFpuRegister());
3893 locations->SetOut(Location::SameAsFirstInput());
3894 break;
3895 default:
3896 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
3897 }
3898}
3899
Aart Bik351df3e2018-03-07 11:54:57 -08003900void InstructionCodeGeneratorX86::GenerateMinMaxInt(LocationSummary* locations,
3901 bool is_min,
3902 DataType::Type type) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08003903 Location op1_loc = locations->InAt(0);
3904 Location op2_loc = locations->InAt(1);
3905
3906 // Shortcut for same input locations.
3907 if (op1_loc.Equals(op2_loc)) {
3908 // Can return immediately, as op1_loc == out_loc.
3909 // Note: if we ever support separate registers, e.g., output into memory, we need to check for
3910 // a copy here.
3911 DCHECK(locations->Out().Equals(op1_loc));
3912 return;
3913 }
3914
3915 if (type == DataType::Type::kInt64) {
3916 // Need to perform a subtract to get the sign right.
3917 // op1 is already in the same location as the output.
3918 Location output = locations->Out();
3919 Register output_lo = output.AsRegisterPairLow<Register>();
3920 Register output_hi = output.AsRegisterPairHigh<Register>();
3921
3922 Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
3923 Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();
3924
3925    // "The comparison is performed by subtracting the second operand from
3926    // the first operand and then setting the status flags in the same
3927    // manner as the SUB instruction." (Intel SDM description of CMP.)
3928 __ cmpl(output_lo, op2_lo);
3929
3930 // Now use a temp and the borrow to finish the subtraction of op2_hi.
3931 Register temp = locations->GetTemp(0).AsRegister<Register>();
3932 __ movl(temp, output_hi);
3933 __ sbbl(temp, op2_hi);
3934
3935 // Now the condition code is correct.
3936 Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
3937 __ cmovl(cond, output_lo, op2_lo);
3938 __ cmovl(cond, output_hi, op2_hi);
3939 } else {
3940 DCHECK_EQ(type, DataType::Type::kInt32);
3941 Register out = locations->Out().AsRegister<Register>();
3942 Register op2 = op2_loc.AsRegister<Register>();
3943
3944 // (out := op1)
3945 // out <=? op2
3946 // if out is min jmp done
3947 // out := op2
3948 // done:
3949
3950 __ cmpl(out, op2);
3951 Condition cond = is_min ? Condition::kGreater : Condition::kLess;
3952 __ cmovl(cond, out, op2);
3953 }
3954}
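// Illustrative note: for kInt64 the cmpl/sbbl pair performs a full 64-bit signed
// compare without modifying either operand: cmpl produces the borrow from the low
// halves and the sbbl into a temporary folds that borrow into the high-half compare,
// so the two cmovl instructions can then select min or max from the usual signed flags.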
3955
3956void InstructionCodeGeneratorX86::GenerateMinMaxFP(LocationSummary* locations,
3957 bool is_min,
3958 DataType::Type type) {
3959 Location op1_loc = locations->InAt(0);
3960 Location op2_loc = locations->InAt(1);
3961 Location out_loc = locations->Out();
3962 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
3963
3964 // Shortcut for same input locations.
3965 if (op1_loc.Equals(op2_loc)) {
3966 DCHECK(out_loc.Equals(op1_loc));
3967 return;
3968 }
3969
3970 // (out := op1)
3971 // out <=? op2
3972 // if Nan jmp Nan_label
3973 // if out is min jmp done
3974 // if op2 is min jmp op2_label
3975 // handle -0/+0
3976 // jmp done
3977 // Nan_label:
3978 // out := NaN
3979 // op2_label:
3980 // out := op2
3981 // done:
3982 //
3983 // This removes one jmp, but needs to copy one input (op1) to out.
3984 //
3985 // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?
3986
3987 XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();
3988
3989 NearLabel nan, done, op2_label;
3990 if (type == DataType::Type::kFloat64) {
3991 __ ucomisd(out, op2);
3992 } else {
3993 DCHECK_EQ(type, DataType::Type::kFloat32);
3994 __ ucomiss(out, op2);
3995 }
3996
3997 __ j(Condition::kParityEven, &nan);
3998
3999 __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
4000 __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);
4001
4002 // Handle 0.0/-0.0.
4003 if (is_min) {
4004 if (type == DataType::Type::kFloat64) {
4005 __ orpd(out, op2);
4006 } else {
4007 __ orps(out, op2);
4008 }
4009 } else {
4010 if (type == DataType::Type::kFloat64) {
4011 __ andpd(out, op2);
4012 } else {
4013 __ andps(out, op2);
4014 }
4015 }
4016 __ jmp(&done);
4017
4018 // NaN handling.
4019 __ Bind(&nan);
4020 if (type == DataType::Type::kFloat64) {
4021 // TODO: Use a constant from the constant table (requires extra input).
4022 __ LoadLongConstant(out, kDoubleNaN);
4023 } else {
4024 Register constant = locations->GetTemp(0).AsRegister<Register>();
4025 __ movl(constant, Immediate(kFloatNaN));
4026 __ movd(out, constant);
4027 }
4028 __ jmp(&done);
4029
4030 // out := op2;
4031 __ Bind(&op2_label);
4032 if (type == DataType::Type::kFloat64) {
4033 __ movsd(out, op2);
4034 } else {
4035 __ movss(out, op2);
4036 }
4037
4038 // Done.
4039 __ Bind(&done);
4040}
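// Illustrative note on the +/-0.0 handling above: if ucomis* reports "equal" (and not
// unordered), the operands are either identical or +0.0 vs -0.0. ORing the raw bit
// patterns keeps a sign bit if either operand has one, so min yields -0.0, while ANDing
// clears the sign unless both are negative, so max yields +0.0. For example,
//   min(+0.0f, -0.0f): 0x00000000 | 0x80000000 = 0x80000000, i.e. -0.0f.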
4041
Aart Bik351df3e2018-03-07 11:54:57 -08004042void InstructionCodeGeneratorX86::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4043 DataType::Type type = minmax->GetResultType();
4044 switch (type) {
4045 case DataType::Type::kInt32:
4046 case DataType::Type::kInt64:
4047 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4048 break;
4049 case DataType::Type::kFloat32:
4050 case DataType::Type::kFloat64:
4051 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4052 break;
4053 default:
4054 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4055 }
4056}
4057
Aart Bik1f8d51b2018-02-15 10:42:37 -08004058void LocationsBuilderX86::VisitMin(HMin* min) {
4059 CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
4060}
4061
4062void InstructionCodeGeneratorX86::VisitMin(HMin* min) {
Aart Bik351df3e2018-03-07 11:54:57 -08004063 GenerateMinMax(min, /*is_min*/ true);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004064}
4065
4066void LocationsBuilderX86::VisitMax(HMax* max) {
4067 CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
4068}
4069
4070void InstructionCodeGeneratorX86::VisitMax(HMax* max) {
Aart Bik351df3e2018-03-07 11:54:57 -08004071 GenerateMinMax(max, /*is_min*/ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004072}
4073
Aart Bik3dad3412018-02-28 12:01:46 -08004074void LocationsBuilderX86::VisitAbs(HAbs* abs) {
4075 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4076 switch (abs->GetResultType()) {
4077 case DataType::Type::kInt32:
4078 locations->SetInAt(0, Location::RegisterLocation(EAX));
4079 locations->SetOut(Location::SameAsFirstInput());
4080 locations->AddTemp(Location::RegisterLocation(EDX));
4081 break;
4082 case DataType::Type::kInt64:
4083 locations->SetInAt(0, Location::RequiresRegister());
4084 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
4085 locations->AddTemp(Location::RequiresRegister());
4086 break;
4087 case DataType::Type::kFloat32:
4088 locations->SetInAt(0, Location::RequiresFpuRegister());
4089 locations->SetOut(Location::SameAsFirstInput());
4090 locations->AddTemp(Location::RequiresFpuRegister());
4091 locations->AddTemp(Location::RequiresRegister());
4092 break;
4093 case DataType::Type::kFloat64:
4094 locations->SetInAt(0, Location::RequiresFpuRegister());
4095 locations->SetOut(Location::SameAsFirstInput());
4096 locations->AddTemp(Location::RequiresFpuRegister());
4097 break;
4098 default:
4099 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4100 }
4101}
4102
4103void InstructionCodeGeneratorX86::VisitAbs(HAbs* abs) {
4104 LocationSummary* locations = abs->GetLocations();
4105 switch (abs->GetResultType()) {
4106 case DataType::Type::kInt32: {
4107 Register out = locations->Out().AsRegister<Register>();
4108 DCHECK_EQ(out, EAX);
4109 Register temp = locations->GetTemp(0).AsRegister<Register>();
4110 DCHECK_EQ(temp, EDX);
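      // Branchless abs: with sign = x >> 31 (all ones for negative x, all zeros otherwise),
      // abs(x) = (x ^ sign) - sign.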
      // Sign extend EAX into EDX.
      __ cdq();
      // XOR EAX with sign.
      __ xorl(EAX, EDX);
      // Subtract out sign to correct.
      __ subl(EAX, EDX);
      // The result is in EAX.
      break;
    }
    case DataType::Type::kInt64: {
      Location input = locations->InAt(0);
      Register input_lo = input.AsRegisterPairLow<Register>();
      Register input_hi = input.AsRegisterPairHigh<Register>();
      Location output = locations->Out();
      Register output_lo = output.AsRegisterPairLow<Register>();
      Register output_hi = output.AsRegisterPairHigh<Register>();
      Register temp = locations->GetTemp(0).AsRegister<Register>();
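      // Same branchless identity as the 32-bit case, applied to the register pair: the sign
      // word is replicated into both halves and the final subtraction borrows via sbb.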
      // Compute the sign into the temporary.
      __ movl(temp, input_hi);
      __ sarl(temp, Immediate(31));
      // Store the sign into the output.
      __ movl(output_lo, temp);
      __ movl(output_hi, temp);
      // XOR the input to the output.
      __ xorl(output_lo, input_lo);
      __ xorl(output_hi, input_hi);
      // Subtract the sign.
      __ subl(output_lo, temp);
      __ sbbl(output_hi, temp);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      Register constant = locations->GetTemp(1).AsRegister<Register>();
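      // Float abs just clears the sign bit: AND the value with 0x7FFFFFFF.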
      __ movl(constant, Immediate(INT32_C(0x7FFFFFFF)));
      __ movd(temp, constant);
      __ andps(out, temp);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
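      // Double abs likewise clears the sign bit, using the 64-bit mask 0x7FFFFFFFFFFFFFFF.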
      // TODO: Use a constant from the constant table (requires extra input).
      __ LoadLongConstant(temp, INT64_C(0x7FFFFFFFFFFFFFFF));
      __ andpd(out, temp);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}

void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  switch (instruction->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      locations->SetInAt(0, Location::Any());
      break;
    }
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
      if (!instruction->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}

void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<Register>(), value.AsRegister<Register>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegisterPair()) {
        Register temp = locations->GetTemp(0).AsRegister<Register>();
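        // A 64-bit value is zero exactly when the OR of its two halves is zero.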
        __ movl(temp, value.AsRegisterPairLow<Register>());
        __ orl(temp, value.AsRegisterPairHigh<Register>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}

void LocationsBuilderX86::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      // Can't have Location::Any() and output SameAsFirstInput()
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL or a constant.
      locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, op->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected op type " << op->GetResultType();
  }
}

void InstructionCodeGeneratorX86::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  switch (op->GetResultType()) {
    case DataType::Type::kInt32: {
      DCHECK(first.IsRegister());
      Register first_reg = first.AsRegister<Register>();
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        DCHECK_EQ(ECX, second_reg);
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance;
        if (shift == 0) {
          return;
        }
        Immediate imm(shift);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        DCHECK_EQ(ECX, second_reg);
        if (op->IsShl()) {
          GenerateShlLong(first, second_reg);
        } else if (op->IsShr()) {
          GenerateShrLong(first, second_reg);
        } else {
          GenerateUShrLong(first, second_reg);
        }
      } else {
        // Shift by a constant.
        int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
        // Nothing to do if the shift is 0, as the input is already the output.
        if (shift != 0) {
          if (op->IsShl()) {
            GenerateShlLong(first, shift);
          } else if (op->IsShr()) {
            GenerateShrLong(first, shift);
          } else {
            GenerateUShrLong(first, shift);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected op type " << op->GetResultType();
  }
}

void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, int shift) {
  Register low = loc.AsRegisterPairLow<Register>();
  Register high = loc.AsRegisterPairHigh<Register>();
  if (shift == 1) {
    // This is just an addition.
    __ addl(low, low);
    __ adcl(high, high);
  } else if (shift == 32) {
    // Shift by 32 is easy. High gets low, and low gets 0.
    codegen_->EmitParallelMoves(
        loc.ToLow(),
        loc.ToHigh(),
        DataType::Type::kInt32,
        Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
        loc.ToLow(),
        DataType::Type::kInt32);
  } else if (shift > 32) {
    // Low part becomes 0. High part is low part << (shift-32).
    __ movl(high, low);
    __ shll(high, Immediate(shift - 32));
    __ xorl(low, low);
  } else {
    // Between 1 and 31.
    __ shld(high, low, Immediate(shift));
    __ shll(low, Immediate(shift));
  }
}

void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, Register shifter) {
  NearLabel done;
  __ shld(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>(), shifter);
  __ shll(loc.AsRegisterPairLow<Register>(), shifter);
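  // x86 shifts mask the count to its low five bits, so the shld/shll above shifted by
  // count % 32. For counts of 32..63 the correct result is high = low << (count - 32),
  // which is already in the low register, and low = 0; the fixup below produces that.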
  __ testl(shifter, Immediate(32));
  __ j(kEqual, &done);
  __ movl(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>());
  __ movl(loc.AsRegisterPairLow<Register>(), Immediate(0));
  __ Bind(&done);
}

void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, int shift) {
  Register low = loc.AsRegisterPairLow<Register>();
  Register high = loc.AsRegisterPairHigh<Register>();
  if (shift == 32) {
    // Need to copy the sign.
    DCHECK_NE(low, high);
    __ movl(low, high);
    __ sarl(high, Immediate(31));
  } else if (shift > 32) {
    DCHECK_NE(low, high);
    // High part becomes sign. Low part is shifted by shift - 32.
    __ movl(low, high);
    __ sarl(high, Immediate(31));
    __ sarl(low, Immediate(shift - 32));
  } else {
    // Between 1 and 31.
    __ shrd(low, high, Immediate(shift));
    __ sarl(high, Immediate(shift));
  }
}

void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, Register shifter) {
  NearLabel done;
  __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
  __ sarl(loc.AsRegisterPairHigh<Register>(), shifter);
  __ testl(shifter, Immediate(32));
  __ j(kEqual, &done);
  __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
  __ sarl(loc.AsRegisterPairHigh<Register>(), Immediate(31));
  __ Bind(&done);
}

void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, int shift) {
  Register low = loc.AsRegisterPairLow<Register>();
  Register high = loc.AsRegisterPairHigh<Register>();
  if (shift == 32) {
    // Shift by 32 is easy. Low gets high, and high gets 0.
    codegen_->EmitParallelMoves(
        loc.ToHigh(),
        loc.ToLow(),
        DataType::Type::kInt32,
        Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
        loc.ToHigh(),
        DataType::Type::kInt32);
  } else if (shift > 32) {
    // Low part is high >> (shift - 32). High part becomes 0.
    __ movl(low, high);
    __ shrl(low, Immediate(shift - 32));
    __ xorl(high, high);
  } else {
    // Between 1 and 31.
    __ shrd(low, high, Immediate(shift));
    __ shrl(high, Immediate(shift));
  }
}

void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, Register shifter) {
  NearLabel done;
  __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
  __ shrl(loc.AsRegisterPairHigh<Register>(), shifter);
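  // Same fixup for counts of 32..63, except the vacated high word is zero-filled for the
  // unsigned shift.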
  __ testl(shifter, Immediate(32));
  __ j(kEqual, &done);
  __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
  __ movl(loc.AsRegisterPairHigh<Register>(), Immediate(0));
  __ Bind(&done);
}

void LocationsBuilderX86::VisitRor(HRor* ror) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt64:
      // Add the temporary needed.
      locations->AddTemp(Location::RequiresRegister());
      FALLTHROUGH_INTENDED;
    case DataType::Type::kInt32:
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL (unless it is a constant).
      locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, ror->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}

void InstructionCodeGeneratorX86::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  if (ror->GetResultType() == DataType::Type::kInt32) {
    Register first_reg = first.AsRegister<Register>();
    if (second.IsRegister()) {
      Register second_reg = second.AsRegister<Register>();
      __ rorl(first_reg, second_reg);
    } else {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
      __ rorl(first_reg, imm);
    }
    return;
  }

  DCHECK_EQ(ror->GetResultType(), DataType::Type::kInt64);
  Register first_reg_lo = first.AsRegisterPairLow<Register>();
  Register first_reg_hi = first.AsRegisterPairHigh<Register>();
  Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
  if (second.IsRegister()) {
    Register second_reg = second.AsRegister<Register>();
    DCHECK_EQ(second_reg, ECX);
    __ movl(temp_reg, first_reg_hi);
    __ shrd(first_reg_hi, first_reg_lo, second_reg);
    __ shrd(first_reg_lo, temp_reg, second_reg);
    __ movl(temp_reg, first_reg_hi);
    __ testl(second_reg, Immediate(32));
    __ cmovl(kNotEqual, first_reg_hi, first_reg_lo);
    __ cmovl(kNotEqual, first_reg_lo, temp_reg);
  } else {
    int32_t shift_amt = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
    if (shift_amt == 0) {
      // Already fine.
      return;
    }
    if (shift_amt == 32) {
      // Just swap.
      __ movl(temp_reg, first_reg_lo);
      __ movl(first_reg_lo, first_reg_hi);
      __ movl(first_reg_hi, temp_reg);
      return;
    }

    Immediate imm(shift_amt);
    // Save the contents of the low value.
    __ movl(temp_reg, first_reg_lo);

    // Shift right into low, feeding bits from high.
    __ shrd(first_reg_lo, first_reg_hi, imm);

    // Shift right into high, feeding bits from the original low.
    __ shrd(first_reg_hi, temp_reg, imm);

    // Swap if needed.
    if (shift_amt > 32) {
      __ movl(temp_reg, first_reg_lo);
      __ movl(first_reg_lo, first_reg_hi);
      __ movl(first_reg_hi, temp_reg);
    }
  }
}

void LocationsBuilderX86::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorX86::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderX86::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorX86::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderX86::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorX86::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  locations->SetOut(Location::RegisterLocation(EAX));
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  DCHECK(!codegen_->IsLeafMethod());
}

void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  locations->SetOut(Location::RegisterLocation(EAX));
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}

void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorX86::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
}

void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorX86::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
}

void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86PointerSize).SizeValue();
    __ movl(locations->Out().AsRegister<Register>(),
            Address(locations->InAt(0).AsRegister<Register>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86PointerSize));
    __ movl(locations->Out().AsRegister<Register>(),
            Address(locations->InAt(0).AsRegister<Register>(),
                    mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
    // temp = temp->GetImtEntryAt(method_offset);
    __ movl(locations->Out().AsRegister<Register>(),
            Address(locations->Out().AsRegister<Register>(), method_offset));
  }
}

void LocationsBuilderX86::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location in = locations->InAt(0);
  Location out = locations->Out();
  DCHECK(in.Equals(out));
  switch (not_->GetResultType()) {
    case DataType::Type::kInt32:
      __ notl(out.AsRegister<Register>());
      break;

    case DataType::Type::kInt64:
      __ notl(out.AsRegisterPairLow<Register>());
      __ notl(out.AsRegisterPairHigh<Register>());
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}

void LocationsBuilderX86::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void InstructionCodeGeneratorX86::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations = bool_not->GetLocations();
  Location in = locations->InAt(0);
  Location out = locations->Out();
  DCHECK(in.Equals(out));
  __ xorl(out.AsRegister<Register>(), Immediate(1));
}

void LocationsBuilderX86::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      if (compare->InputAt(1)->IsX86LoadFromConstantTable()) {
        DCHECK(compare->InputAt(1)->IsEmittedAtUseSite());
      } else if (compare->InputAt(1)->IsConstant()) {
        locations->SetInAt(1, Location::RequiresFpuRegister());
      } else {
        locations->SetInAt(1, Location::Any());
      }
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}

void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Condition less_cond = kLess;

  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      Register left_low = left.AsRegisterPairLow<Register>();
      Register left_high = left.AsRegisterPairHigh<Register>();
      int32_t val_low = 0;
      int32_t val_high = 0;
      bool right_is_const = false;

      if (right.IsConstant()) {
        DCHECK(right.GetConstant()->IsLongConstant());
        right_is_const = true;
        int64_t val = right.GetConstant()->AsLongConstant()->GetValue();
        val_low = Low32Bits(val);
        val_high = High32Bits(val);
      }

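      // Compare the high words first with a signed compare; only if they are equal do the low
      // words decide the result, and those are compared unsigned (hence less_cond = kBelow).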
      if (right.IsRegisterPair()) {
        __ cmpl(left_high, right.AsRegisterPairHigh<Register>());
      } else if (right.IsDoubleStackSlot()) {
        __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
      } else {
        DCHECK(right_is_const) << right;
        codegen_->Compare32BitValue(left_high, val_high);
      }
      __ j(kLess, &less);  // Signed compare.
      __ j(kGreater, &greater);  // Signed compare.
      if (right.IsRegisterPair()) {
        __ cmpl(left_low, right.AsRegisterPairLow<Register>());
      } else if (right.IsDoubleStackSlot()) {
        __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
      } else {
        DCHECK(right_is_const) << right;
        codegen_->Compare32BitValue(left_low, val_low);
      }
      less_cond = kBelow;  // for CF (unsigned).
      break;
    }
    case DataType::Type::kFloat32: {
      GenerateFPCompare(left, right, compare, false);
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // for CF (floats).
      break;
    }
    case DataType::Type::kFloat64: {
      GenerateFPCompare(left, right, compare, true);
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // for CF (floats).
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }

  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}

void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void CodeGeneratorX86::GenerateMemoryBarrier(MemBarrierKind kind) {
  /*
   * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
   * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86 memory model.
   * For those cases, all we need to ensure is that there is a scheduling barrier in place.
   */
  switch (kind) {
    case MemBarrierKind::kAnyAny: {
      MemoryFence();
      break;
    }
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kStoreStore: {
      // nop
      break;
    }
    case MemBarrierKind::kNTStoreStore:
      // Non-Temporal Store/Store needs an explicit fence.
      MemoryFence(/* non-temporal= */ true);
      break;
  }
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    ArtMethod* method ATTRIBUTE_UNUSED) {
  return desired_dispatch_info;
}

Register CodeGeneratorX86::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
                                                                 Register temp) {
  DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
  Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
  if (!invoke->GetLocations()->Intrinsified()) {
    return location.AsRegister<Register>();
  }
  // For intrinsics we allow any location, so it may be on the stack.
  if (!location.IsRegister()) {
    __ movl(temp, Address(ESP, location.GetStackIndex()));
    return temp;
  }
  // For register locations, check if the register was saved. If so, get it from the stack.
  // Note: There is a chance that the register was saved but not overwritten, so we could
  // save one load. However, since this is just an intrinsic slow path we prefer this
  // simple and more robust approach rather than trying to determine if that's the case.
  SlowPathCode* slow_path = GetCurrentSlowPath();
  DCHECK(slow_path != nullptr);  // For intrinsified invokes the call is emitted on the slow path.
  if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
    int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
    __ movl(temp, Address(ESP, stack_offset));
    return temp;
  }
  return location.AsRegister<Register>();
}

void CodeGeneratorX86::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ fs()->movl(temp.AsRegister<Register>(), Address::Absolute(offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
      Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
                                                                temp.AsRegister<Register>());
      __ leal(temp.AsRegister<Register>(), Address(base_reg, CodeGeneratorX86::kDummy32BitOffset));
      RecordBootImageMethodPatch(invoke);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
                                                                temp.AsRegister<Register>());
      __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
      RecordBootImageRelRoPatch(
          invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress(),
          GetBootImageOffset(invoke));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
                                                                temp.AsRegister<Register>());
      __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
      RecordMethodBssEntryPatch(invoke);
      // No need for memory fence, thanks to the x86 memory model.
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      __ movl(temp.AsRegister<Register>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
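      // Recursive call into the method being compiled: call its own frame entry label directly.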
      __ call(GetFrameEntryLabel());
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<Register>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86PointerSize).Int32Value()));
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  Register temp = temp_in.AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86PointerSize).Uint32Value();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(receiver, class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}

void CodeGeneratorX86::RecordBootImageIntrinsicPatch(HX86ComputeBaseMethodAddress* method_address,
                                                     uint32_t intrinsic_data) {
  boot_image_other_patches_.emplace_back(
      method_address, /* target_dex_file= */ nullptr, intrinsic_data);
  __ Bind(&boot_image_other_patches_.back().label);
}

void CodeGeneratorX86::RecordBootImageRelRoPatch(HX86ComputeBaseMethodAddress* method_address,
                                                 uint32_t boot_image_offset) {
  boot_image_other_patches_.emplace_back(
      method_address, /* target_dex_file= */ nullptr, boot_image_offset);
  __ Bind(&boot_image_other_patches_.back().label);
}

void CodeGeneratorX86::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
  DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
  HX86ComputeBaseMethodAddress* method_address =
      invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
  boot_image_method_patches_.emplace_back(
      method_address, invoke->GetTargetMethod().dex_file, invoke->GetTargetMethod().index);
  __ Bind(&boot_image_method_patches_.back().label);
}

void CodeGeneratorX86::RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke) {
  DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
  HX86ComputeBaseMethodAddress* method_address =
      invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
  // Add the patch entry and bind its label at the end of the instruction.
  method_bss_entry_patches_.emplace_back(
      method_address, &GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
  __ Bind(&method_bss_entry_patches_.back().label);
}

void CodeGeneratorX86::RecordBootImageTypePatch(HLoadClass* load_class) {
  HX86ComputeBaseMethodAddress* method_address =
      load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
  boot_image_type_patches_.emplace_back(
      method_address, &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
  __ Bind(&boot_image_type_patches_.back().label);
}

Label* CodeGeneratorX86::NewTypeBssEntryPatch(HLoadClass* load_class) {
  HX86ComputeBaseMethodAddress* method_address =
      load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
  type_bss_entry_patches_.emplace_back(
      method_address, &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
  return &type_bss_entry_patches_.back().label;
}

void CodeGeneratorX86::RecordBootImageStringPatch(HLoadString* load_string) {
  HX86ComputeBaseMethodAddress* method_address =
      load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
  boot_image_string_patches_.emplace_back(
      method_address, &load_string->GetDexFile(), load_string->GetStringIndex().index_);
  __ Bind(&boot_image_string_patches_.back().label);
}

Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
  HX86ComputeBaseMethodAddress* method_address =
      load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
  string_bss_entry_patches_.emplace_back(
      method_address, &load_string->GetDexFile(), load_string->GetStringIndex().index_);
  return &string_bss_entry_patches_.back().label;
}

void CodeGeneratorX86::LoadBootImageAddress(Register reg,
                                            uint32_t boot_image_reference,
                                            HInvokeStaticOrDirect* invoke) {
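  // Three cases, mirroring the branches below: when compiling the boot image itself, emit a
  // pc-relative address to be patched at link time; for other PIC compilation, load the address
  // through the boot image .data.bimg.rel.ro entry; otherwise this is JIT with the boot image
  // already loaded, so the address is known now and embedded directly.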
Vladimir Marko6fd16062018-06-26 11:02:04 +01005018 if (GetCompilerOptions().IsBootImage()) {
5019 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5020 HX86ComputeBaseMethodAddress* method_address =
5021 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
5022 DCHECK(method_address != nullptr);
5023 Register method_address_reg =
5024 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
5025 __ leal(reg, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
5026 RecordBootImageIntrinsicPatch(method_address, boot_image_reference);
Vladimir Markoa2da9b92018-10-10 14:21:55 +01005027 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01005028 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5029 HX86ComputeBaseMethodAddress* method_address =
5030 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
5031 DCHECK(method_address != nullptr);
5032 Register method_address_reg =
5033 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
5034 __ movl(reg, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko6fd16062018-06-26 11:02:04 +01005035 RecordBootImageRelRoPatch(method_address, boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01005036 } else {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005037 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markoeebb8212018-06-05 14:57:24 +01005038 gc::Heap* heap = Runtime::Current()->GetHeap();
5039 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01005040 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01005041 __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
5042 }
5043}
5044
Vladimir Marko6fd16062018-06-26 11:02:04 +01005045void CodeGeneratorX86::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
5046 uint32_t boot_image_offset) {
5047 DCHECK(invoke->IsStatic());
5048 InvokeRuntimeCallingConvention calling_convention;
5049 Register argument = calling_convention.GetRegisterAt(0);
5050 if (GetCompilerOptions().IsBootImage()) {
5051 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
5052 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
5053 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5054 HX86ComputeBaseMethodAddress* method_address =
5055 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
5056 DCHECK(method_address != nullptr);
5057 Register method_address_reg =
5058 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
5059 __ leal(argument, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
5060 MethodReference target_method = invoke->GetTargetMethod();
5061 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
5062 boot_image_type_patches_.emplace_back(method_address, target_method.dex_file, type_idx.index_);
5063 __ Bind(&boot_image_type_patches_.back().label);
5064 } else {
5065 LoadBootImageAddress(argument, boot_image_offset, invoke);
5066 }
5067 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
5068 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
5069}
5070
Vladimir Markoaad75c62016-10-03 08:46:48 +00005071// The label points to the end of the "movl" or another instruction but the literal offset
5072// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
5073constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
5074
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005075template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00005076inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005077 const ArenaDeque<X86PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005078 ArenaVector<linker::LinkerPatch>* linker_patches) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005079 for (const X86PcRelativePatchInfo& info : infos) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005080 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005081 linker_patches->push_back(Factory(literal_offset,
5082 info.target_dex_file,
5083 GetMethodAddressOffset(info.method_address),
5084 info.offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00005085 }
5086}
5087
Vladimir Marko6fd16062018-06-26 11:02:04 +01005088template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
5089linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
5090 const DexFile* target_dex_file,
5091 uint32_t pc_insn_offset,
5092 uint32_t boot_image_offset) {
5093 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
5094 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00005095}
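// Added note: the adapter above exists because the IntrinsicReferencePatch and DataBimgRelRoPatch
// factories take (literal_offset, pc_insn_offset, boot_image_offset) without a dex file, whereas
// EmitPcRelativeLinkerPatches expects a factory that also receives the (here unused and null)
// target dex file.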
5096
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005097void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00005098 DCHECK(linker_patches->empty());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00005099 size_t size =
Vladimir Marko65979462017-05-19 17:25:12 +01005100 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005101 method_bss_entry_patches_.size() +
Vladimir Marko1998cd02017-01-13 13:02:58 +00005102 boot_image_type_patches_.size() +
Vladimir Marko65979462017-05-19 17:25:12 +01005103 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005104 boot_image_string_patches_.size() +
Vladimir Marko6fd16062018-06-26 11:02:04 +01005105 string_bss_entry_patches_.size() +
Vladimir Marko2d06e022019-07-08 15:45:19 +01005106 boot_image_other_patches_.size();
Vladimir Marko0f7dca42015-11-02 14:36:43 +00005107 linker_patches->reserve(size);
Vladimir Marko44ca0752019-07-29 10:18:25 +01005108 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005109 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
5110 boot_image_method_patches_, linker_patches);
5111 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
5112 boot_image_type_patches_, linker_patches);
5113 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005114 boot_image_string_patches_, linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005115 } else {
Vladimir Marko2d06e022019-07-08 15:45:19 +01005116 DCHECK(boot_image_method_patches_.empty());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005117 DCHECK(boot_image_type_patches_.empty());
5118 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko2d06e022019-07-08 15:45:19 +01005119 }
5120 if (GetCompilerOptions().IsBootImage()) {
5121 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
5122 boot_image_other_patches_, linker_patches);
5123 } else {
5124 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
5125 boot_image_other_patches_, linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005126 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005127 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
5128 method_bss_entry_patches_, linker_patches);
5129 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
5130 type_bss_entry_patches_, linker_patches);
5131 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
5132 string_bss_entry_patches_, linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00005133 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00005134}
5135
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005136void CodeGeneratorX86::MarkGCCard(Register temp,
5137 Register card,
5138 Register object,
5139 Register value,
5140 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005141 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005142 if (value_can_be_null) {
5143 __ testl(value, value);
5144 __ j(kEqual, &is_null);
5145 }
Roland Levillainc73f0522018-08-14 15:16:50 +01005146 // Load the address of the card table into `card`.
Andreas Gampe542451c2016-07-26 09:02:02 -07005147 __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86PointerSize>().Int32Value()));
Roland Levillainc73f0522018-08-14 15:16:50 +01005148 // Calculate the offset (in the card table) of the card corresponding to
5149 // `object`.
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005150 __ movl(temp, object);
5151 __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillainc73f0522018-08-14 15:16:50 +01005152 // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
5153 // `object`'s card.
5154 //
5155 // Register `card` contains the address of the card table. Note that the card
5156 // table's base is biased during its creation so that it always starts at an
5157 // address whose least-significant byte is equal to `kCardDirty` (see
5158 // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
5159 // below writes the `kCardDirty` (byte) value into the `object`'s card
 5160 // (located at `card + (object >> kCardShift)`).
5161 //
5162 // This dual use of the value in register `card` (1. to calculate the location
5163 // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
5164 // (no need to explicitly load `kCardDirty` as an immediate value).
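  // Added illustration (addresses are made up; assumes kCardShift == 10, i.e. one card per 1 KiB
  // of heap): for an object at 0x12345678, `temp` becomes 0x12345678 >> 10 = 0x48D15, and the
  // movb below stores the low byte of `card` (which equals kCardDirty thanks to the biased base)
  // at address `card + 0x48D15`.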
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00005165 __ movb(Address(temp, card, TIMES_1, 0),
5166 X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005167 if (value_can_be_null) {
5168 __ Bind(&is_null);
5169 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005170}
5171
Calin Juravle52c48962014-12-16 17:02:57 +00005172void LocationsBuilderX86::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
5173 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005174
5175 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005176 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005177 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005178 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5179 kEmitCompilerReadBarrier
5180 ? LocationSummary::kCallOnSlowPath
5181 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005182 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005183 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005184 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005185 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005186
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005187 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005188 locations->SetOut(Location::RequiresFpuRegister());
5189 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005190 // The output overlaps in case of long: we don't want the low move
5191 // to overwrite the object's location. Likewise, in the case of
5192 // an object field get with read barriers enabled, we do not want
5193 // the move to overwrite the object's location, as we need it to emit
5194 // the read barrier.
5195 locations->SetOut(
5196 Location::RequiresRegister(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005197 (object_field_get_with_read_barrier || instruction->GetType() == DataType::Type::kInt64) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00005198 Location::kOutputOverlap :
5199 Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005200 }
Calin Juravle52c48962014-12-16 17:02:57 +00005201
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005202 if (field_info.IsVolatile() && (field_info.GetFieldType() == DataType::Type::kInt64)) {
Calin Juravle52c48962014-12-16 17:02:57 +00005203 // Long values can be loaded atomically into an XMM using movsd.
Roland Levillain7c1559a2015-12-15 10:55:36 +00005204 // So we use an XMM register as a temp to achieve atomicity (first
5205 // load the temp into the XMM and then copy the XMM into the
5206 // output, 32 bits at a time).
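    // Added sketch of the sequence emitted in InstructionCodeGeneratorX86::HandleFieldGet below:
    //   movsd xmm_temp, [base + offset]   // single atomic 64-bit load
    //   movd  out_lo, xmm_temp
    //   psrlq xmm_temp, 32
    //   movd  out_hi, xmm_temp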
Calin Juravle52c48962014-12-16 17:02:57 +00005207 locations->AddTemp(Location::RequiresFpuRegister());
5208 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005209}
5210
Calin Juravle52c48962014-12-16 17:02:57 +00005211void InstructionCodeGeneratorX86::HandleFieldGet(HInstruction* instruction,
5212 const FieldInfo& field_info) {
5213 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005214
Calin Juravle52c48962014-12-16 17:02:57 +00005215 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005216 Location base_loc = locations->InAt(0);
5217 Register base = base_loc.AsRegister<Register>();
Calin Juravle52c48962014-12-16 17:02:57 +00005218 Location out = locations->Out();
5219 bool is_volatile = field_info.IsVolatile();
Vladimir Marko61b92282017-10-11 13:23:17 +01005220 DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
5221 DataType::Type load_type = instruction->GetType();
Calin Juravle52c48962014-12-16 17:02:57 +00005222 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
5223
Vladimir Marko61b92282017-10-11 13:23:17 +01005224 switch (load_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005225 case DataType::Type::kBool:
5226 case DataType::Type::kUint8: {
Calin Juravle52c48962014-12-16 17:02:57 +00005227 __ movzxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005228 break;
5229 }
5230
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005231 case DataType::Type::kInt8: {
Calin Juravle52c48962014-12-16 17:02:57 +00005232 __ movsxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005233 break;
5234 }
5235
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005236 case DataType::Type::kUint16: {
5237 __ movzxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005238 break;
5239 }
5240
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005241 case DataType::Type::kInt16: {
5242 __ movsxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005243 break;
5244 }
5245
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005246 case DataType::Type::kInt32:
Calin Juravle52c48962014-12-16 17:02:57 +00005247 __ movl(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005248 break;
Roland Levillain7c1559a2015-12-15 10:55:36 +00005249
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005250 case DataType::Type::kReference: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005251 // /* HeapReference<Object> */ out = *(base + offset)
5252 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005253 // Note that a potential implicit null check is handled in this
5254 // CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier call.
5255 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08005256 instruction, out, base, offset, /* needs_null_check= */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005257 if (is_volatile) {
5258 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5259 }
5260 } else {
5261 __ movl(out.AsRegister<Register>(), Address(base, offset));
5262 codegen_->MaybeRecordImplicitNullCheck(instruction);
5263 if (is_volatile) {
5264 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5265 }
5266 // If read barriers are enabled, emit read barriers other than
5267 // Baker's using a slow path (and also unpoison the loaded
5268 // reference, if heap poisoning is enabled).
5269 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
5270 }
5271 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005272 }
5273
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005274 case DataType::Type::kInt64: {
Calin Juravle52c48962014-12-16 17:02:57 +00005275 if (is_volatile) {
5276 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
5277 __ movsd(temp, Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005278 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005279 __ movd(out.AsRegisterPairLow<Register>(), temp);
5280 __ psrlq(temp, Immediate(32));
5281 __ movd(out.AsRegisterPairHigh<Register>(), temp);
5282 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005283 DCHECK_NE(base, out.AsRegisterPairLow<Register>());
Calin Juravle52c48962014-12-16 17:02:57 +00005284 __ movl(out.AsRegisterPairLow<Register>(), Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005285 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005286 __ movl(out.AsRegisterPairHigh<Register>(), Address(base, kX86WordSize + offset));
5287 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005288 break;
5289 }
5290
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005291 case DataType::Type::kFloat32: {
Calin Juravle52c48962014-12-16 17:02:57 +00005292 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005293 break;
5294 }
5295
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005296 case DataType::Type::kFloat64: {
Calin Juravle52c48962014-12-16 17:02:57 +00005297 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005298 break;
5299 }
5300
Aart Bik66c158e2018-01-31 12:55:04 -08005301 case DataType::Type::kUint32:
5302 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005303 case DataType::Type::kVoid:
Vladimir Marko61b92282017-10-11 13:23:17 +01005304 LOG(FATAL) << "Unreachable type " << load_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005305 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005306 }
Calin Juravle52c48962014-12-16 17:02:57 +00005307
Vladimir Marko61b92282017-10-11 13:23:17 +01005308 if (load_type == DataType::Type::kReference || load_type == DataType::Type::kInt64) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005309 // Potential implicit null checks, in the case of reference or
5310 // long fields, are handled in the previous switch statement.
5311 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005312 codegen_->MaybeRecordImplicitNullCheck(instruction);
5313 }
5314
Calin Juravle52c48962014-12-16 17:02:57 +00005315 if (is_volatile) {
Vladimir Marko61b92282017-10-11 13:23:17 +01005316 if (load_type == DataType::Type::kReference) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005317 // Memory barriers, in the case of references, are also handled
5318 // in the previous switch statement.
5319 } else {
5320 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5321 }
Roland Levillain4d027112015-07-01 15:41:14 +01005322 }
Calin Juravle52c48962014-12-16 17:02:57 +00005323}
5324
5325void LocationsBuilderX86::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
5326 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5327
5328 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005329 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00005330 locations->SetInAt(0, Location::RequiresRegister());
5331 bool is_volatile = field_info.IsVolatile();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005332 DataType::Type field_type = field_info.GetFieldType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005333 bool is_byte_type = DataType::Size(field_type) == 1u;
Calin Juravle52c48962014-12-16 17:02:57 +00005334
5335 // The register allocator does not support multiple
5336 // inputs that die at entry with one in a specific register.
5337 if (is_byte_type) {
5338 // Ensure the value is in a byte register.
5339 locations->SetInAt(1, Location::RegisterLocation(EAX));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005340 } else if (DataType::IsFloatingPointType(field_type)) {
5341 if (is_volatile && field_type == DataType::Type::kFloat64) {
Mark Mendell81489372015-11-04 11:30:41 -05005342 // In order to satisfy the semantics of volatile, this must be a single instruction store.
5343 locations->SetInAt(1, Location::RequiresFpuRegister());
5344 } else {
5345 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
5346 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005347 } else if (is_volatile && field_type == DataType::Type::kInt64) {
Mark Mendell81489372015-11-04 11:30:41 -05005348 // In order to satisfy the semantics of volatile, this must be a single instruction store.
Calin Juravle52c48962014-12-16 17:02:57 +00005349 locations->SetInAt(1, Location::RequiresRegister());
Mark Mendell81489372015-11-04 11:30:41 -05005350
Calin Juravle52c48962014-12-16 17:02:57 +00005351 // 64bits value can be atomically written to an address with movsd and an XMM register.
5352 // We need two XMM registers because there's no easier way to (bit) copy a register pair
5353 // into a single XMM register (we copy each pair part into the XMMs and then interleave them).
5354 // NB: We could make the register allocator understand fp_reg <-> core_reg moves but given the
5355 // isolated cases when we need this it isn't worth adding the extra complexity.
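    // Added sketch of the sequence emitted in InstructionCodeGeneratorX86::HandleFieldSet below:
    //   movd      xmm_temp1, value_lo
    //   movd      xmm_temp2, value_hi
    //   punpckldq xmm_temp1, xmm_temp2        // interleave: xmm_temp1 = value_hi:value_lo
    //   movsd     [base + offset], xmm_temp1  // single atomic 64-bit store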
5356 locations->AddTemp(Location::RequiresFpuRegister());
5357 locations->AddTemp(Location::RequiresFpuRegister());
Mark Mendell81489372015-11-04 11:30:41 -05005358 } else {
5359 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5360
5361 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
5362 // Temporary registers for the write barrier.
5363 locations->AddTemp(Location::RequiresRegister()); // May be used for reference poisoning too.
5364 // Ensure the card is in a byte register.
5365 locations->AddTemp(Location::RegisterLocation(ECX));
5366 }
Calin Juravle52c48962014-12-16 17:02:57 +00005367 }
5368}
5369
5370void InstructionCodeGeneratorX86::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005371 const FieldInfo& field_info,
5372 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00005373 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5374
5375 LocationSummary* locations = instruction->GetLocations();
5376 Register base = locations->InAt(0).AsRegister<Register>();
5377 Location value = locations->InAt(1);
5378 bool is_volatile = field_info.IsVolatile();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005379 DataType::Type field_type = field_info.GetFieldType();
Calin Juravle52c48962014-12-16 17:02:57 +00005380 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01005381 bool needs_write_barrier =
5382 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00005383
5384 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005385 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00005386 }
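  // Added note: on x86 the hardware memory model (TSO) is strong enough that, per this file's
  // GenerateMemoryBarrier, typically only the kAnyAny barrier emitted after the store below needs
  // an actual fence instruction; the kAnyStore barrier above is expected to compile to nothing
  // and mainly documents the required ordering.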
5387
Mark Mendell81489372015-11-04 11:30:41 -05005388 bool maybe_record_implicit_null_check_done = false;
5389
Calin Juravle52c48962014-12-16 17:02:57 +00005390 switch (field_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005391 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005392 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005393 case DataType::Type::kInt8: {
Calin Juravle52c48962014-12-16 17:02:57 +00005394 __ movb(Address(base, offset), value.AsRegister<ByteRegister>());
5395 break;
5396 }
5397
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005398 case DataType::Type::kUint16:
5399 case DataType::Type::kInt16: {
Mark Mendell81489372015-11-04 11:30:41 -05005400 if (value.IsConstant()) {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005401 __ movw(Address(base, offset),
5402 Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Mark Mendell81489372015-11-04 11:30:41 -05005403 } else {
5404 __ movw(Address(base, offset), value.AsRegister<Register>());
5405 }
Calin Juravle52c48962014-12-16 17:02:57 +00005406 break;
5407 }
5408
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005409 case DataType::Type::kInt32:
5410 case DataType::Type::kReference: {
Roland Levillain4d027112015-07-01 15:41:14 +01005411 if (kPoisonHeapReferences && needs_write_barrier) {
5412 // Note that in the case where `value` is a null reference,
5413 // we do not enter this block, as the reference does not
5414 // need poisoning.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005415 DCHECK_EQ(field_type, DataType::Type::kReference);
Roland Levillain4d027112015-07-01 15:41:14 +01005416 Register temp = locations->GetTemp(0).AsRegister<Register>();
5417 __ movl(temp, value.AsRegister<Register>());
5418 __ PoisonHeapReference(temp);
5419 __ movl(Address(base, offset), temp);
Mark Mendell81489372015-11-04 11:30:41 -05005420 } else if (value.IsConstant()) {
5421 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5422 __ movl(Address(base, offset), Immediate(v));
Roland Levillain4d027112015-07-01 15:41:14 +01005423 } else {
Nicolas Geoffray03971632016-03-17 10:44:24 +00005424 DCHECK(value.IsRegister()) << value;
Roland Levillain4d027112015-07-01 15:41:14 +01005425 __ movl(Address(base, offset), value.AsRegister<Register>());
5426 }
Calin Juravle52c48962014-12-16 17:02:57 +00005427 break;
5428 }
5429
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005430 case DataType::Type::kInt64: {
Calin Juravle52c48962014-12-16 17:02:57 +00005431 if (is_volatile) {
5432 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
5433 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
5434 __ movd(temp1, value.AsRegisterPairLow<Register>());
5435 __ movd(temp2, value.AsRegisterPairHigh<Register>());
5436 __ punpckldq(temp1, temp2);
5437 __ movsd(Address(base, offset), temp1);
Calin Juravle77520bc2015-01-12 18:45:46 +00005438 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell81489372015-11-04 11:30:41 -05005439 } else if (value.IsConstant()) {
5440 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
5441 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
5442 codegen_->MaybeRecordImplicitNullCheck(instruction);
5443 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
Calin Juravle52c48962014-12-16 17:02:57 +00005444 } else {
5445 __ movl(Address(base, offset), value.AsRegisterPairLow<Register>());
Calin Juravle77520bc2015-01-12 18:45:46 +00005446 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005447 __ movl(Address(base, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
5448 }
Mark Mendell81489372015-11-04 11:30:41 -05005449 maybe_record_implicit_null_check_done = true;
Calin Juravle52c48962014-12-16 17:02:57 +00005450 break;
5451 }
5452
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005453 case DataType::Type::kFloat32: {
Mark Mendell81489372015-11-04 11:30:41 -05005454 if (value.IsConstant()) {
5455 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5456 __ movl(Address(base, offset), Immediate(v));
5457 } else {
5458 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5459 }
Calin Juravle52c48962014-12-16 17:02:57 +00005460 break;
5461 }
5462
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005463 case DataType::Type::kFloat64: {
Mark Mendell81489372015-11-04 11:30:41 -05005464 if (value.IsConstant()) {
5465 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
5466 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
5467 codegen_->MaybeRecordImplicitNullCheck(instruction);
5468 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
5469 maybe_record_implicit_null_check_done = true;
5470 } else {
5471 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5472 }
Calin Juravle52c48962014-12-16 17:02:57 +00005473 break;
5474 }
5475
Aart Bik66c158e2018-01-31 12:55:04 -08005476 case DataType::Type::kUint32:
5477 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005478 case DataType::Type::kVoid:
Calin Juravle52c48962014-12-16 17:02:57 +00005479 LOG(FATAL) << "Unreachable type " << field_type;
5480 UNREACHABLE();
5481 }
5482
Mark Mendell81489372015-11-04 11:30:41 -05005483 if (!maybe_record_implicit_null_check_done) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005484 codegen_->MaybeRecordImplicitNullCheck(instruction);
5485 }
5486
Roland Levillain4d027112015-07-01 15:41:14 +01005487 if (needs_write_barrier) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005488 Register temp = locations->GetTemp(0).AsRegister<Register>();
5489 Register card = locations->GetTemp(1).AsRegister<Register>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005490 codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00005491 }
5492
Calin Juravle52c48962014-12-16 17:02:57 +00005493 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005494 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00005495 }
5496}
5497
5498void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5499 HandleFieldGet(instruction, instruction->GetFieldInfo());
5500}
5501
5502void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5503 HandleFieldGet(instruction, instruction->GetFieldInfo());
5504}
5505
5506void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5507 HandleFieldSet(instruction, instruction->GetFieldInfo());
5508}
5509
5510void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005511 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005512}
5513
5514void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5515 HandleFieldSet(instruction, instruction->GetFieldInfo());
5516}
5517
5518void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005519 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005520}
5521
5522void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5523 HandleFieldGet(instruction, instruction->GetFieldInfo());
5524}
5525
5526void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5527 HandleFieldGet(instruction, instruction->GetFieldInfo());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005528}
5529
Vladimir Marko552a1342017-10-31 10:56:47 +00005530void LocationsBuilderX86::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5531 codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(EAX));
5532}
5533
5534void InstructionCodeGeneratorX86::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
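  // Added note (hedged): the `format` constant, which presumably encodes the number and kinds of
  // the values to append, is materialized in EAX, the first register of the x86 runtime calling
  // convention, for the kQuickStringBuilderAppend call; the values themselves stay in the
  // locations set up by CreateStringBuilderAppendLocations above.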
5535 __ movl(EAX, Immediate(instruction->GetFormat()->GetValue()));
5536 codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
5537}
5538
Calin Juravlee460d1d2015-09-29 04:52:17 +01005539void LocationsBuilderX86::VisitUnresolvedInstanceFieldGet(
5540 HUnresolvedInstanceFieldGet* instruction) {
5541 FieldAccessCallingConventionX86 calling_convention;
5542 codegen_->CreateUnresolvedFieldLocationSummary(
5543 instruction, instruction->GetFieldType(), calling_convention);
5544}
5545
5546void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldGet(
5547 HUnresolvedInstanceFieldGet* instruction) {
5548 FieldAccessCallingConventionX86 calling_convention;
5549 codegen_->GenerateUnresolvedFieldAccess(instruction,
5550 instruction->GetFieldType(),
5551 instruction->GetFieldIndex(),
5552 instruction->GetDexPc(),
5553 calling_convention);
5554}
5555
5556void LocationsBuilderX86::VisitUnresolvedInstanceFieldSet(
5557 HUnresolvedInstanceFieldSet* instruction) {
5558 FieldAccessCallingConventionX86 calling_convention;
5559 codegen_->CreateUnresolvedFieldLocationSummary(
5560 instruction, instruction->GetFieldType(), calling_convention);
5561}
5562
5563void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldSet(
5564 HUnresolvedInstanceFieldSet* instruction) {
5565 FieldAccessCallingConventionX86 calling_convention;
5566 codegen_->GenerateUnresolvedFieldAccess(instruction,
5567 instruction->GetFieldType(),
5568 instruction->GetFieldIndex(),
5569 instruction->GetDexPc(),
5570 calling_convention);
5571}
5572
5573void LocationsBuilderX86::VisitUnresolvedStaticFieldGet(
5574 HUnresolvedStaticFieldGet* instruction) {
5575 FieldAccessCallingConventionX86 calling_convention;
5576 codegen_->CreateUnresolvedFieldLocationSummary(
5577 instruction, instruction->GetFieldType(), calling_convention);
5578}
5579
5580void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldGet(
5581 HUnresolvedStaticFieldGet* instruction) {
5582 FieldAccessCallingConventionX86 calling_convention;
5583 codegen_->GenerateUnresolvedFieldAccess(instruction,
5584 instruction->GetFieldType(),
5585 instruction->GetFieldIndex(),
5586 instruction->GetDexPc(),
5587 calling_convention);
5588}
5589
5590void LocationsBuilderX86::VisitUnresolvedStaticFieldSet(
5591 HUnresolvedStaticFieldSet* instruction) {
5592 FieldAccessCallingConventionX86 calling_convention;
5593 codegen_->CreateUnresolvedFieldLocationSummary(
5594 instruction, instruction->GetFieldType(), calling_convention);
5595}
5596
5597void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldSet(
5598 HUnresolvedStaticFieldSet* instruction) {
5599 FieldAccessCallingConventionX86 calling_convention;
5600 codegen_->GenerateUnresolvedFieldAccess(instruction,
5601 instruction->GetFieldType(),
5602 instruction->GetFieldIndex(),
5603 instruction->GetDexPc(),
5604 calling_convention);
5605}
5606
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005607void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005608 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5609 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5610 ? Location::RequiresRegister()
5611 : Location::Any();
5612 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005613}
5614
Calin Juravle2ae48182016-03-16 14:05:09 +00005615void CodeGeneratorX86::GenerateImplicitNullCheck(HNullCheck* instruction) {
5616 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005617 return;
5618 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005619 LocationSummary* locations = instruction->GetLocations();
5620 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005621
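  // Added note: the `testl` below performs a load from the object's address; if the reference is
  // null this faults, and the runtime's SIGSEGV handler uses the stack map recorded by
  // RecordPcInfo below to turn the fault into a NullPointerException (the usual ART
  // implicit-null-check scheme; the handler itself lives outside this file).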
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005622 __ testl(EAX, Address(obj.AsRegister<Register>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00005623 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005624}
5625
Calin Juravle2ae48182016-03-16 14:05:09 +00005626void CodeGeneratorX86::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005627 SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005628 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005629
5630 LocationSummary* locations = instruction->GetLocations();
5631 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005632
5633 if (obj.IsRegister()) {
Mark Mendell42514f62015-03-31 11:34:22 -04005634 __ testl(obj.AsRegister<Register>(), obj.AsRegister<Register>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005635 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005636 __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005637 } else {
5638 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00005639 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005640 __ jmp(slow_path->GetEntryLabel());
5641 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005642 }
5643 __ j(kEqual, slow_path->GetEntryLabel());
5644}
5645
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005646void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005647 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005648}
5649
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005650void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005651 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005652 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005653 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005654 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5655 object_array_get_with_read_barrier
5656 ? LocationSummary::kCallOnSlowPath
5657 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005658 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005659 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005660 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005661 locations->SetInAt(0, Location::RequiresRegister());
5662 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005663 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005664 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5665 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005666 // The output overlaps in case of long: we don't want the low move
5667 // to overwrite the array's location. Likewise, in the case of an
5668 // object array get with read barriers enabled, we do not want the
5669 // move to overwrite the array's location, as we need it to emit
5670 // the read barrier.
5671 locations->SetOut(
5672 Location::RequiresRegister(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005673 (instruction->GetType() == DataType::Type::kInt64 || object_array_get_with_read_barrier)
5674 ? Location::kOutputOverlap
5675 : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005676 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005677}
5678
5679void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
5680 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005681 Location obj_loc = locations->InAt(0);
5682 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005683 Location index = locations->InAt(1);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005684 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01005685 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005686
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005687 DataType::Type type = instruction->GetType();
Calin Juravle77520bc2015-01-12 18:45:46 +00005688 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005689 case DataType::Type::kBool:
5690 case DataType::Type::kUint8: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005691 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005692 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005693 break;
5694 }
5695
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005696 case DataType::Type::kInt8: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005697 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005698 __ movsxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005699 break;
5700 }
5701
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005702 case DataType::Type::kUint16: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005703 Register out = out_loc.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07005704 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
 5705 // Branch to the compressed or the uncompressed load depending on the string's compression flag.
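        // Added note (assumed String layout): the low bit of the `count` field holds the
        // compression state (0 = compressed, per the static_assert below) and the remaining bits
        // hold the length, so the `testb` against 1 selects between an 8-bit and a 16-bit
        // per-character load.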
5706 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
5707 NearLabel done, not_compressed;
Vladimir Marko3c89d422017-02-17 11:30:23 +00005708 __ testb(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005709 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005710 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
5711 "Expecting 0=compressed, 1=uncompressed");
5712 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07005713 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
5714 __ jmp(&done);
5715 __ Bind(&not_compressed);
5716 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5717 __ Bind(&done);
5718 } else {
5719 // Common case for charAt of array of char or when string compression's
5720 // feature is turned off.
5721 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5722 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005723 break;
5724 }
5725
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005726 case DataType::Type::kInt16: {
5727 Register out = out_loc.AsRegister<Register>();
5728 __ movsxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5729 break;
5730 }
5731
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005732 case DataType::Type::kInt32: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005733 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005734 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005735 break;
5736 }
5737
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005738 case DataType::Type::kReference: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005739 static_assert(
5740 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5741 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00005742 // /* HeapReference<Object> */ out =
5743 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
5744 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005745 // Note that a potential implicit null check is handled in this
5746 // CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier call.
5747 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08005748 instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005749 } else {
5750 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005751 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
5752 codegen_->MaybeRecordImplicitNullCheck(instruction);
5753 // If read barriers are enabled, emit read barriers other than
5754 // Baker's using a slow path (and also unpoison the loaded
5755 // reference, if heap poisoning is enabled).
Roland Levillain7c1559a2015-12-15 10:55:36 +00005756 if (index.IsConstant()) {
5757 uint32_t offset =
5758 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain7c1559a2015-12-15 10:55:36 +00005759 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
5760 } else {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005761 codegen_->MaybeGenerateReadBarrierSlow(
5762 instruction, out_loc, out_loc, obj_loc, data_offset, index);
5763 }
5764 }
5765 break;
5766 }
5767
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005768 case DataType::Type::kInt64: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005769 DCHECK_NE(obj, out_loc.AsRegisterPairLow<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005770 __ movl(out_loc.AsRegisterPairLow<Register>(),
5771 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
5772 codegen_->MaybeRecordImplicitNullCheck(instruction);
5773 __ movl(out_loc.AsRegisterPairHigh<Register>(),
5774 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset + kX86WordSize));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005775 break;
5776 }
5777
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005778 case DataType::Type::kFloat32: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005779 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005780 __ movss(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005781 break;
5782 }
5783
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005784 case DataType::Type::kFloat64: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005785 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005786 __ movsd(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005787 break;
5788 }
5789
Aart Bik66c158e2018-01-31 12:55:04 -08005790 case DataType::Type::kUint32:
5791 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005792 case DataType::Type::kVoid:
Calin Juravle77520bc2015-01-12 18:45:46 +00005793 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005794 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005795 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005796
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005797 if (type == DataType::Type::kReference || type == DataType::Type::kInt64) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005798 // Potential implicit null checks, in the case of reference or
5799 // long arrays, are handled in the previous switch statement.
5800 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005801 codegen_->MaybeRecordImplicitNullCheck(instruction);
5802 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005803}
5804
5805void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005806 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005807
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005808 bool needs_write_barrier =
5809 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005810 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005811
Vladimir Markoca6fff82017-10-03 14:49:14 +01005812 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffray39468442014-09-02 15:17:15 +01005813 instruction,
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005814 needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005815
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005816 bool is_byte_type = DataType::Size(value_type) == 1u;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005817 // We need the inputs to be different from the output in the case of a long operation.
 5818 // In the case of a byte operation, the register allocator does not support multiple
 5819 // inputs that die at entry when one of them is tied to a specific register.
5820 locations->SetInAt(0, Location::RequiresRegister());
5821 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5822 if (is_byte_type) {
5823 // Ensure the value is in a byte register.
5824 locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005825 } else if (DataType::IsFloatingPointType(value_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05005826 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005827 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005828 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5829 }
5830 if (needs_write_barrier) {
5831 // Temporary registers for the write barrier.
5832 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
5833 // Ensure the card is in a byte register.
Roland Levillain4f6b0b52015-11-23 19:29:22 +00005834 locations->AddTemp(Location::RegisterLocation(ECX));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005835 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005836}
5837
5838void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
5839 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005840 Location array_loc = locations->InAt(0);
5841 Register array = array_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005842 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005843 Location value = locations->InAt(2);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005844 DataType::Type value_type = instruction->GetComponentType();
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005845 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005846 bool needs_write_barrier =
5847 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005848
5849 switch (value_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005850 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005851 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005852 case DataType::Type::kInt8: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005853 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005854 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005855 if (value.IsRegister()) {
5856 __ movb(address, value.AsRegister<ByteRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005857 } else {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005858 __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005859 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005860 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005861 break;
5862 }
5863
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005864 case DataType::Type::kUint16:
5865 case DataType::Type::kInt16: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005866 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005867 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005868 if (value.IsRegister()) {
5869 __ movw(address, value.AsRegister<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005870 } else {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005871 __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005872 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005873 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005874 break;
5875 }
5876
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005877 case DataType::Type::kReference: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005878 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005879 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005880
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005881 if (!value.IsRegister()) {
5882 // Just setting null.
5883 DCHECK(instruction->InputAt(2)->IsNullConstant());
5884 DCHECK(value.IsConstant()) << value;
5885 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00005886 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005887 DCHECK(!needs_write_barrier);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005888 DCHECK(!needs_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005889 break;
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005890 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005891
5892 DCHECK(needs_write_barrier);
5893 Register register_value = value.AsRegister<Register>();
Roland Levillain16d9f942016-08-25 17:27:56 +01005894 Location temp_loc = locations->GetTemp(0);
5895 Register temp = temp_loc.AsRegister<Register>();
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005896
5897 bool can_value_be_null = instruction->GetValueCanBeNull();
5898 NearLabel do_store;
5899 if (can_value_be_null) {
5900 __ testl(register_value, register_value);
5901 __ j(kEqual, &do_store);
5902 }
5903
5904 SlowPathCode* slow_path = nullptr;
5905 if (needs_type_check) {
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005906 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005907 codegen_->AddSlowPath(slow_path);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005908
5909 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5910 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5911 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005912
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005913 // Note that when Baker read barriers are enabled, the type
5914 // checks are performed without read barriers. This is fine,
5915 // even in the case where a class object is in the from-space
5916 // after the flip, as a comparison involving such a type would
5917 // not produce a false positive; it may of course produce a
5918 // false negative, in which case we would take the ArraySet
5919 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01005920
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005921 // /* HeapReference<Class> */ temp = array->klass_
5922 __ movl(temp, Address(array, class_offset));
5923 codegen_->MaybeRecordImplicitNullCheck(instruction);
5924 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01005925
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005926 // /* HeapReference<Class> */ temp = temp->component_type_
5927 __ movl(temp, Address(temp, component_offset));
5928 // If heap poisoning is enabled, no need to unpoison `temp`
5929 // nor the object reference in `register_value->klass`, as
5930 // we are comparing two poisoned references.
5931 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01005932
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005933 if (instruction->StaticTypeOfArrayIsObjectArray()) {
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005934 NearLabel do_put;
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005935 __ j(kEqual, &do_put);
5936 // If heap poisoning is enabled, the `temp` reference has
5937 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005938 __ MaybeUnpoisonHeapReference(temp);
5939
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005940 // If heap poisoning is enabled, no need to unpoison the
5941 // heap reference loaded below, as it is only used for a
5942 // comparison with null.
5943 __ cmpl(Address(temp, super_offset), Immediate(0));
5944 __ j(kNotEqual, slow_path->GetEntryLabel());
5945 __ Bind(&do_put);
5946 } else {
5947 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005948 }
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005949 }
5950
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005951 Register card = locations->GetTemp(1).AsRegister<Register>();
5952 codegen_->MarkGCCard(
5953 temp, card, array, value.AsRegister<Register>(), /* value_can_be_null= */ false);
5954
5955 if (can_value_be_null) {
5956 DCHECK(do_store.IsLinked());
5957 __ Bind(&do_store);
5958 }
5959
5960 Register source = register_value;
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005961 if (kPoisonHeapReferences) {
5962 __ movl(temp, register_value);
5963 __ PoisonHeapReference(temp);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005964 source = temp;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005965 }
5966
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005967 __ movl(address, source);
5968
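      // Added note: when a type check was emitted and the value cannot be null, every path has
      // already dereferenced `array` at the class load above (which recorded the implicit null
      // check), so nothing needs to be recorded for the store; otherwise the store may be the
      // first access to `array` on some path and the null check is recorded here.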
5969 if (can_value_be_null || !needs_type_check) {
5970 codegen_->MaybeRecordImplicitNullCheck(instruction);
5971 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005972
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005973 if (slow_path != nullptr) {
5974 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005975 }
5976
5977 break;
5978 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005979
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005980 case DataType::Type::kInt32: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005981 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005982 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005983 if (value.IsRegister()) {
5984 __ movl(address, value.AsRegister<Register>());
5985 } else {
5986 DCHECK(value.IsConstant()) << value;
5987 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5988 __ movl(address, Immediate(v));
5989 }
5990 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005991 break;
5992 }
5993
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005994 case DataType::Type::kInt64: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005995 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005996 if (value.IsRegisterPair()) {
5997 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
5998 value.AsRegisterPairLow<Register>());
5999 codegen_->MaybeRecordImplicitNullCheck(instruction);
6000 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
6001 value.AsRegisterPairHigh<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006002 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006003 DCHECK(value.IsConstant());
6004 int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
6005 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
6006 Immediate(Low32Bits(val)));
6007 codegen_->MaybeRecordImplicitNullCheck(instruction);
6008 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
6009 Immediate(High32Bits(val)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006010 }
6011 break;
6012 }
6013
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006014 case DataType::Type::kFloat32: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006015 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006016 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendell81489372015-11-04 11:30:41 -05006017 if (value.IsFpuRegister()) {
6018 __ movss(address, value.AsFpuRegister<XmmRegister>());
6019 } else {
6020 DCHECK(value.IsConstant());
6021 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
6022 __ movl(address, Immediate(v));
6023 }
6024 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006025 break;
6026 }
6027
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006028 case DataType::Type::kFloat64: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006029 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006030 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendell81489372015-11-04 11:30:41 -05006031 if (value.IsFpuRegister()) {
6032 __ movsd(address, value.AsFpuRegister<XmmRegister>());
6033 } else {
6034 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006035 Address address_hi =
6036 CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset + kX86WordSize);
Mark Mendell81489372015-11-04 11:30:41 -05006037 int64_t v = bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
6038 __ movl(address, Immediate(Low32Bits(v)));
6039 codegen_->MaybeRecordImplicitNullCheck(instruction);
6040 __ movl(address_hi, Immediate(High32Bits(v)));
6041 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05006042 break;
6043 }
6044
Aart Bik66c158e2018-01-31 12:55:04 -08006045 case DataType::Type::kUint32:
6046 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006047 case DataType::Type::kVoid:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006048 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07006049 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006050 }
6051}
6052
6053void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006054 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01006055 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04006056 if (!instruction->IsEmittedAtUseSite()) {
6057 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6058 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006059}
6060
6061void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04006062 if (instruction->IsEmittedAtUseSite()) {
6063 return;
6064 }
6065
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006066 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01006067 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00006068 Register obj = locations->InAt(0).AsRegister<Register>();
6069 Register out = locations->Out().AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006070 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00006071 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07006072  // Shift out the compression flag (stored in the least significant bit) to get the String's length.
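  // With compression enabled the count field is assumed to hold (length << 1) | flag, so
  // `count >> 1` yields the character count regardless of the flag value.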
6073 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006074 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07006075 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006076}
6077
6078void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006079 RegisterSet caller_saves = RegisterSet::Empty();
6080 InvokeRuntimeCallingConvention calling_convention;
6081 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6082 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
6083 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05006084 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04006085 HInstruction* length = instruction->InputAt(1);
6086 if (!length->IsEmittedAtUseSite()) {
6087 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
6088 }
jessicahandojo4877b792016-09-08 19:49:13 -07006089  // Need a temporary register to load the array's length.
6090 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
6091 locations->AddTemp(Location::RequiresRegister());
6092 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006093}
6094
6095void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
jessicahandojo4877b792016-09-08 19:49:13 -07006096 const bool is_string_compressed_char_at =
6097 mirror::kUseStringCompression && instruction->IsStringCharAt();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006098 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05006099 Location index_loc = locations->InAt(0);
6100 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006101 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006102 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006103
Mark Mendell99dbd682015-04-22 16:18:52 -04006104 if (length_loc.IsConstant()) {
6105 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
6106 if (index_loc.IsConstant()) {
 6107      // BCE will remove the bounds check if we are guaranteed to pass.
6108 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
6109 if (index < 0 || index >= length) {
6110 codegen_->AddSlowPath(slow_path);
6111 __ jmp(slow_path->GetEntryLabel());
6112 } else {
6113 // Some optimization after BCE may have generated this, and we should not
6114 // generate a bounds check if it is a valid range.
6115 }
6116 return;
6117 }
6118
6119 // We have to reverse the jump condition because the length is the constant.
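    // Note: the unsigned comparison (kAboveEqual here, kBelowEqual in the branch below) also
    // catches a negative index, which wraps around to a large unsigned value, so no separate
    // `index < 0` check is needed.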
6120 Register index_reg = index_loc.AsRegister<Register>();
6121 __ cmpl(index_reg, Immediate(length));
6122 codegen_->AddSlowPath(slow_path);
6123 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05006124 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04006125 HInstruction* array_length = instruction->InputAt(1);
6126 if (array_length->IsEmittedAtUseSite()) {
6127 // Address the length field in the array.
6128 DCHECK(array_length->IsArrayLength());
6129 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
6130 Location array_loc = array_length->GetLocations()->InAt(0);
6131 Address array_len(array_loc.AsRegister<Register>(), len_offset);
jessicahandojo4877b792016-09-08 19:49:13 -07006132 if (is_string_compressed_char_at) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006133 // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
6134 // the string compression flag) with the in-memory length and avoid the temporary.
jessicahandojo4877b792016-09-08 19:49:13 -07006135 Register length_reg = locations->GetTemp(0).AsRegister<Register>();
6136 __ movl(length_reg, array_len);
6137 codegen_->MaybeRecordImplicitNullCheck(array_length);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006138 __ shrl(length_reg, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07006139 codegen_->GenerateIntCompare(length_reg, index_loc);
Mark Mendellee8d9712016-07-12 11:13:15 -04006140 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006141        // Checking bounds for the general case:
 6142        // an array of chars, or a String's char array with compression disabled.
6143 if (index_loc.IsConstant()) {
6144 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
6145 __ cmpl(array_len, Immediate(value));
6146 } else {
6147 __ cmpl(array_len, index_loc.AsRegister<Register>());
6148 }
6149 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendellee8d9712016-07-12 11:13:15 -04006150 }
Mark Mendell99dbd682015-04-22 16:18:52 -04006151 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006152 codegen_->GenerateIntCompare(length_loc, index_loc);
Mark Mendell99dbd682015-04-22 16:18:52 -04006153 }
6154 codegen_->AddSlowPath(slow_path);
6155 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05006156 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006157}
6158
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006159void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006160 LOG(FATAL) << "Unreachable";
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01006161}
6162
6163void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01006164 if (instruction->GetNext()->IsSuspendCheck() &&
6165 instruction->GetBlock()->GetLoopInformation() != nullptr) {
6166 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
6167 // The back edge will generate the suspend check.
6168 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
6169 }
6170
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006171 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
6172}
6173
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006174void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006175 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6176 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07006177  // In the suspend check slow path, there are usually no caller-save registers at all.
 6178  // If SIMD instructions are present, however, we force spilling all live SIMD
 6179  // registers in full width (since the runtime only saves/restores the lower part).
Aart Bik5576f372017-03-23 16:17:37 -07006180 locations->SetCustomSlowPathCallerSaves(
6181 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006182}
6183
6184void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006185 HBasicBlock* block = instruction->GetBlock();
6186 if (block->GetLoopInformation() != nullptr) {
6187 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
6188 // The back edge will generate the suspend check.
6189 return;
6190 }
6191 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
6192 // The goto will generate the suspend check.
6193 return;
6194 }
6195 GenerateSuspendCheck(instruction, nullptr);
6196}
6197
6198void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
6199 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006200 SuspendCheckSlowPathX86* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006201 down_cast<SuspendCheckSlowPathX86*>(instruction->GetSlowPath());
6202 if (slow_path == nullptr) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006203 slow_path =
6204 new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86(instruction, successor);
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006205 instruction->SetSlowPath(slow_path);
6206 codegen_->AddSlowPath(slow_path);
6207 if (successor != nullptr) {
6208 DCHECK(successor->IsLoopHeader());
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006209 }
6210 } else {
6211 DCHECK_EQ(slow_path->GetSuccessor(), successor);
6212 }
6213
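  // Note: this tests the 16-bit thread-flags word against zero; any pending flag (e.g. a
  // suspend or checkpoint request) makes it non-zero and routes execution to the slow path.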
Andreas Gampe542451c2016-07-26 09:02:02 -07006214 __ fs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86PointerSize>().Int32Value()),
Roland Levillain7c1559a2015-12-15 10:55:36 +00006215 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006216 if (successor == nullptr) {
6217 __ j(kNotEqual, slow_path->GetEntryLabel());
6218 __ Bind(slow_path->GetReturnLabel());
6219 } else {
6220 __ j(kEqual, codegen_->GetLabelOf(successor));
6221 __ jmp(slow_path->GetEntryLabel());
6222 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006223}
6224
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006225X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
6226 return codegen_->GetAssembler();
6227}
6228
Aart Bikcfe50bb2017-12-12 14:54:12 -08006229void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src, int number_of_words) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006230 ScratchRegisterScope ensure_scratch(
6231 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
6232 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
6233 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
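  // If the scratch register had to be spilled, SpillScratch() pushed it and moved ESP down by
  // one word; `stack_offset` compensates so the ESP-relative `src`/`dst` offsets still address
  // the intended slots. Exchange() and ExchangeMemory() below use the same pattern.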
Mark Mendell7c8d0092015-01-26 11:21:33 -05006234
Aart Bikcfe50bb2017-12-12 14:54:12 -08006235 // Now that temp register is available (possibly spilled), move blocks of memory.
6236 for (int i = 0; i < number_of_words; i++) {
6237 __ movl(temp_reg, Address(ESP, src + stack_offset));
6238 __ movl(Address(ESP, dst + stack_offset), temp_reg);
6239 stack_offset += kX86WordSize;
6240 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006241}
6242
6243void ParallelMoveResolverX86::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01006244 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006245 Location source = move->GetSource();
6246 Location destination = move->GetDestination();
6247
6248 if (source.IsRegister()) {
6249 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006250 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
David Brazdil74eb1b22015-12-14 11:44:01 +00006251 } else if (destination.IsFpuRegister()) {
6252 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006253 } else {
6254 DCHECK(destination.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006255 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006256 }
David Brazdil74eb1b22015-12-14 11:44:01 +00006257 } else if (source.IsRegisterPair()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006258 size_t elem_size = DataType::Size(DataType::Type::kInt32);
David Brazdil74eb1b22015-12-14 11:44:01 +00006259 // Create stack space for 2 elements.
6260 __ subl(ESP, Immediate(2 * elem_size));
6261 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
6262 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
6263 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
6264 // And remove the temporary stack space we allocated.
6265 __ addl(ESP, Immediate(2 * elem_size));
Mark Mendell7c8d0092015-01-26 11:21:33 -05006266 } else if (source.IsFpuRegister()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00006267 if (destination.IsRegister()) {
6268 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
6269 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006270 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
David Brazdil74eb1b22015-12-14 11:44:01 +00006271 } else if (destination.IsRegisterPair()) {
6272 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
6273 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
6274 __ psrlq(src_reg, Immediate(32));
6275 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
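      // The 64-bit FP value is split without going through memory: the first movd copies the
      // low 32 bits, psrlq shifts the XMM register right by 32, and the second movd copies
      // what was originally the high 32 bits.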
Mark Mendell7c8d0092015-01-26 11:21:33 -05006276 } else if (destination.IsStackSlot()) {
6277 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07006278 } else if (destination.IsDoubleStackSlot()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006279 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07006280 } else {
6281 DCHECK(destination.IsSIMDStackSlot());
6282 __ movups(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05006283 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006284 } else if (source.IsStackSlot()) {
6285 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006286 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Mark Mendell7c8d0092015-01-26 11:21:33 -05006287 } else if (destination.IsFpuRegister()) {
6288 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006289 } else {
6290 DCHECK(destination.IsStackSlot());
Aart Bikcfe50bb2017-12-12 14:54:12 -08006291 MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 1);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006292 }
6293 } else if (source.IsDoubleStackSlot()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00006294 if (destination.IsRegisterPair()) {
6295 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
6296 __ movl(destination.AsRegisterPairHigh<Register>(),
6297 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
6298 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006299 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
6300 } else {
6301 DCHECK(destination.IsDoubleStackSlot()) << destination;
Aart Bikcfe50bb2017-12-12 14:54:12 -08006302 MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 2);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006303 }
Aart Bik5576f372017-03-23 16:17:37 -07006304 } else if (source.IsSIMDStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08006305 if (destination.IsFpuRegister()) {
6306 __ movups(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
6307 } else {
6308 DCHECK(destination.IsSIMDStackSlot());
6309 MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 4);
6310 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01006311 } else if (source.IsConstant()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006312 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00006313 if (constant->IsIntConstant() || constant->IsNullConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05006314 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006315 if (destination.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05006316 if (value == 0) {
6317 __ xorl(destination.AsRegister<Register>(), destination.AsRegister<Register>());
6318 } else {
6319 __ movl(destination.AsRegister<Register>(), Immediate(value));
6320 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05006321 } else {
6322 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell09b84632015-02-13 17:48:38 -05006323 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Mark Mendell7c8d0092015-01-26 11:21:33 -05006324 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006325 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006326 float fp_value = constant->AsFloatConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00006327 int32_t value = bit_cast<int32_t, float>(fp_value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006328 Immediate imm(value);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006329 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006330 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
6331 if (value == 0) {
6332 // Easy handling of 0.0.
6333 __ xorps(dest, dest);
6334 } else {
6335 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006336 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
6337 Register temp = static_cast<Register>(ensure_scratch.GetRegister());
6338 __ movl(temp, Immediate(value));
6339 __ movd(dest, temp);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006340 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05006341 } else {
6342 DCHECK(destination.IsStackSlot()) << destination;
6343 __ movl(Address(ESP, destination.GetStackIndex()), imm);
6344 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006345 } else if (constant->IsLongConstant()) {
6346 int64_t value = constant->AsLongConstant()->GetValue();
6347 int32_t low_value = Low32Bits(value);
6348 int32_t high_value = High32Bits(value);
6349 Immediate low(low_value);
6350 Immediate high(high_value);
6351 if (destination.IsDoubleStackSlot()) {
6352 __ movl(Address(ESP, destination.GetStackIndex()), low);
6353 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
6354 } else {
6355 __ movl(destination.AsRegisterPairLow<Register>(), low);
6356 __ movl(destination.AsRegisterPairHigh<Register>(), high);
6357 }
6358 } else {
6359 DCHECK(constant->IsDoubleConstant());
6360 double dbl_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00006361 int64_t value = bit_cast<int64_t, double>(dbl_value);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006362 int32_t low_value = Low32Bits(value);
6363 int32_t high_value = High32Bits(value);
6364 Immediate low(low_value);
6365 Immediate high(high_value);
6366 if (destination.IsFpuRegister()) {
6367 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
6368 if (value == 0) {
6369 // Easy handling of 0.0.
6370 __ xorpd(dest, dest);
6371 } else {
6372 __ pushl(high);
6373 __ pushl(low);
6374 __ movsd(dest, Address(ESP, 0));
6375 __ addl(ESP, Immediate(8));
6376 }
6377 } else {
6378 DCHECK(destination.IsDoubleStackSlot()) << destination;
6379 __ movl(Address(ESP, destination.GetStackIndex()), low);
6380 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
6381 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01006382 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006383 } else {
Nicolas Geoffray42d1f5f2015-01-16 09:14:18 +00006384 LOG(FATAL) << "Unimplemented move: " << destination << " <- " << source;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006385 }
6386}
6387
Mark Mendella5c19ce2015-04-01 12:51:05 -04006388void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006389 Register suggested_scratch = reg == EAX ? EBX : EAX;
6390 ScratchRegisterScope ensure_scratch(
6391 this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
6392
6393 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
6394 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
6395 __ movl(Address(ESP, mem + stack_offset), reg);
6396 __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006397}
6398
Mark Mendell7c8d0092015-01-26 11:21:33 -05006399void ParallelMoveResolverX86::Exchange32(XmmRegister reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006400 ScratchRegisterScope ensure_scratch(
6401 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
6402
6403 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
6404 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
6405 __ movl(temp_reg, Address(ESP, mem + stack_offset));
6406 __ movss(Address(ESP, mem + stack_offset), reg);
6407 __ movd(reg, temp_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006408}
6409
Aart Bikcfe50bb2017-12-12 14:54:12 -08006410void ParallelMoveResolverX86::Exchange128(XmmRegister reg, int mem) {
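  // Sketch of the approach: carve out 16 bytes of stack as scratch, spill the XMM register
  // there, swap that scratch area with the memory operand word by word (its offset is adjusted
  // by `extra_slot` because ESP just moved), then reload the XMM register and release the
  // scratch space.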
6411 size_t extra_slot = 4 * kX86WordSize;
6412 __ subl(ESP, Immediate(extra_slot));
6413 __ movups(Address(ESP, 0), XmmRegister(reg));
6414 ExchangeMemory(0, mem + extra_slot, 4);
6415 __ movups(XmmRegister(reg), Address(ESP, 0));
6416 __ addl(ESP, Immediate(extra_slot));
6417}
6418
6419void ParallelMoveResolverX86::ExchangeMemory(int mem1, int mem2, int number_of_words) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006420 ScratchRegisterScope ensure_scratch1(
6421 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006422
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006423 Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
6424 ScratchRegisterScope ensure_scratch2(
6425 this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006426
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006427 int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
6428 stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
Aart Bikcfe50bb2017-12-12 14:54:12 -08006429
6430 // Now that temp registers are available (possibly spilled), exchange blocks of memory.
6431 for (int i = 0; i < number_of_words; i++) {
6432 __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
6433 __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
6434 __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
6435 __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
6436 stack_offset += kX86WordSize;
6437 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006438}
6439
6440void ParallelMoveResolverX86::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01006441 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006442 Location source = move->GetSource();
6443 Location destination = move->GetDestination();
6444
6445 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell90979812015-07-28 16:41:21 -04006446    // Use the XOR swap algorithm to avoid a serializing XCHG instruction or a temporary register.
6447 DCHECK_NE(destination.AsRegister<Register>(), source.AsRegister<Register>());
6448 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
6449 __ xorl(source.AsRegister<Register>(), destination.AsRegister<Register>());
6450 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
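    // For reference, with destination == D and source == S initially:
    //   after the first xorl,  destination == D ^ S
    //   after the second xorl, source      == S ^ (D ^ S) == D
    //   after the third xorl,  destination == (D ^ S) ^ D == S
    // so the two registers are exchanged without a scratch register.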
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006451 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006452 Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006453 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006454 Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006455 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08006456 ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 1);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006457 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
6458 // Use XOR Swap algorithm to avoid a temporary.
6459 DCHECK_NE(source.reg(), destination.reg());
6460 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
6461 __ xorpd(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
6462 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
6463 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
6464 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
6465 } else if (destination.IsFpuRegister() && source.IsStackSlot()) {
6466 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006467 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
6468 // Take advantage of the 16 bytes in the XMM register.
6469 XmmRegister reg = source.AsFpuRegister<XmmRegister>();
6470 Address stack(ESP, destination.GetStackIndex());
6471 // Load the double into the high doubleword.
6472 __ movhpd(reg, stack);
6473
6474 // Store the low double into the destination.
6475 __ movsd(stack, reg);
6476
6477 // Move the high double to the low double.
6478 __ psrldq(reg, Immediate(8));
6479 } else if (destination.IsFpuRegister() && source.IsDoubleStackSlot()) {
6480 // Take advantage of the 16 bytes in the XMM register.
6481 XmmRegister reg = destination.AsFpuRegister<XmmRegister>();
6482 Address stack(ESP, source.GetStackIndex());
6483 // Load the double into the high doubleword.
6484 __ movhpd(reg, stack);
6485
6486 // Store the low double into the destination.
6487 __ movsd(stack, reg);
6488
6489 // Move the high double to the low double.
6490 __ psrldq(reg, Immediate(8));
6491 } else if (destination.IsDoubleStackSlot() && source.IsDoubleStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08006492 ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 2);
6493 } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
6494 ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 4);
6495 } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
6496 Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
6497 } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
6498 Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006499 } else {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006500 LOG(FATAL) << "Unimplemented: source: " << source << ", destination: " << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006501 }
6502}
6503
6504void ParallelMoveResolverX86::SpillScratch(int reg) {
6505 __ pushl(static_cast<Register>(reg));
6506}
6507
6508void ParallelMoveResolverX86::RestoreScratch(int reg) {
6509 __ popl(static_cast<Register>(reg));
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01006510}
6511
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006512HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
6513 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006514 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006515 case HLoadClass::LoadKind::kInvalid:
6516 LOG(FATAL) << "UNREACHABLE";
6517 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006518 case HLoadClass::LoadKind::kReferrersClass:
6519 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006520 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006521 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006522 case HLoadClass::LoadKind::kBssEntry:
Vladimir Marko764d4542017-05-16 10:31:41 +01006523 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006524 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006525 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006526 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006527 DCHECK(Runtime::Current()->UseJitCompilation());
6528 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006529 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006530 break;
6531 }
6532 return desired_class_load_kind;
6533}
6534
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006535void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00006536 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006537 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006538 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00006539 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006540 cls,
6541 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00006542 Location::RegisterLocation(EAX));
Vladimir Markoea4c1262017-02-06 19:59:33 +00006543 DCHECK_EQ(calling_convention.GetRegisterAt(0), EAX);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006544 return;
6545 }
Vladimir Marko41559982017-01-06 14:04:23 +00006546 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006547
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006548 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
6549 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006550 ? LocationSummary::kCallOnSlowPath
6551 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006552 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006553 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006554 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006555 }
6556
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006557 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006558 load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006559 load_kind == HLoadClass::LoadKind::kBootImageRelRo ||
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006560 load_kind == HLoadClass::LoadKind::kBssEntry) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006561 locations->SetInAt(0, Location::RequiresRegister());
6562 }
6563 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006564 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
6565 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6566 // Rely on the type resolution and/or initialization to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006567 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006568 } else {
6569 // For non-Baker read barrier we have a temp-clobbering call.
6570 }
6571 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006572}
6573
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006574Label* CodeGeneratorX86::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006575 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006576 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006577 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006578 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006579 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006580 PatchInfo<Label>* info = &jit_class_patches_.back();
6581 return &info->label;
6582}
6583
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006584// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6585// move.
6586void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00006587 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006588 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00006589 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01006590 return;
6591 }
Vladimir Marko41559982017-01-06 14:04:23 +00006592 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01006593
Vladimir Marko41559982017-01-06 14:04:23 +00006594 LocationSummary* locations = cls->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006595 Location out_loc = locations->Out();
6596 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006597
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006598 bool generate_null_check = false;
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006599 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
6600 ? kWithoutReadBarrier
6601 : kCompilerReadBarrierOption;
Vladimir Marko41559982017-01-06 14:04:23 +00006602 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006603 case HLoadClass::LoadKind::kReferrersClass: {
6604 DCHECK(!cls->CanCallRuntime());
6605 DCHECK(!cls->MustGenerateClinitCheck());
6606 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
6607 Register current_method = locations->InAt(0).AsRegister<Register>();
6608 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006609 cls,
6610 out_loc,
6611 Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
Andreas Gampe3db70682018-12-26 15:12:03 -08006612 /* fixup_label= */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006613 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006614 break;
6615 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006616 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko44ca0752019-07-29 10:18:25 +01006617 DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
6618 codegen_->GetCompilerOptions().IsBootImageExtension());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006619 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006620 Register method_address = locations->InAt(0).AsRegister<Register>();
6621 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006622 codegen_->RecordBootImageTypePatch(cls);
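      // Note: 32-bit x86 has no PC-relative data addressing, so `method_address` is assumed to
      // hold a base produced by HX86ComputeBaseMethodAddress, and kDummy32BitOffset is a
      // placeholder displacement that the recorded patch rewrites when the image is linked.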
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006623 break;
6624 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006625 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01006626 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6627 Register method_address = locations->InAt(0).AsRegister<Register>();
6628 __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006629 codegen_->RecordBootImageRelRoPatch(cls->InputAt(0)->AsX86ComputeBaseMethodAddress(),
6630 codegen_->GetBootImageOffset(cls));
Vladimir Marko94ec2db2017-09-06 17:21:03 +01006631 break;
6632 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006633 case HLoadClass::LoadKind::kBssEntry: {
6634 Register method_address = locations->InAt(0).AsRegister<Register>();
6635 Address address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6636 Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
6637 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01006638 // No need for memory fence, thanks to the x86 memory model.
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006639 generate_null_check = true;
6640 break;
6641 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006642 case HLoadClass::LoadKind::kJitBootImageAddress: {
6643 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
6644 uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
6645 DCHECK_NE(address, 0u);
6646 __ movl(out, Immediate(address));
6647 break;
6648 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006649 case HLoadClass::LoadKind::kJitTableAddress: {
6650 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6651 Label* fixup_label = codegen_->NewJitRootClassPatch(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006652 cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006653 // /* GcRoot<mirror::Class> */ out = *address
Vladimir Markoea4c1262017-02-06 19:59:33 +00006654 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006655 break;
6656 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006657 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006658 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00006659 LOG(FATAL) << "UNREACHABLE";
6660 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006661 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006662
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006663 if (generate_null_check || cls->MustGenerateClinitCheck()) {
6664 DCHECK(cls->CanCallRuntime());
Vladimir Markoa9f303c2018-07-20 16:43:56 +01006665 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86(cls, cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006666 codegen_->AddSlowPath(slow_path);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006667
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006668 if (generate_null_check) {
6669 __ testl(out, out);
6670 __ j(kEqual, slow_path->GetEntryLabel());
6671 }
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006672
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006673 if (cls->MustGenerateClinitCheck()) {
6674 GenerateClassInitializationCheck(slow_path, out);
6675 } else {
6676 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006677 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006678 }
6679}
6680
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006681void LocationsBuilderX86::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6682 InvokeRuntimeCallingConvention calling_convention;
6683 Location location = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6684 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
6685}
6686
6687void InstructionCodeGeneratorX86::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6688 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
6689}
6690
Orion Hodson18259d72018-04-12 11:18:23 +01006691void LocationsBuilderX86::VisitLoadMethodType(HLoadMethodType* load) {
6692 InvokeRuntimeCallingConvention calling_convention;
6693 Location location = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6694 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
6695}
6696
6697void InstructionCodeGeneratorX86::VisitLoadMethodType(HLoadMethodType* load) {
6698 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
6699}
6700
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006701void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
6702 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006703 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006704 locations->SetInAt(0, Location::RequiresRegister());
6705 if (check->HasUses()) {
6706 locations->SetOut(Location::SameAsFirstInput());
6707 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006708 // Rely on the type initialization to save everything we need.
6709 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006710}
6711
6712void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006713  // We assume the class is not null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01006714 SlowPathCode* slow_path =
6715 new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86(check->GetLoadClass(), check);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006716 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00006717 GenerateClassInitializationCheck(slow_path,
6718 check->GetLocations()->InAt(0).AsRegister<Register>());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006719}
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006720
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006721void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07006722 SlowPathCode* slow_path, Register class_reg) {
Vladimir Markodc682aa2018-01-04 18:42:57 +00006723 constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
6724 const size_t status_byte_offset =
6725 mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
Vladimir Markobf121912019-06-04 13:49:05 +01006726 constexpr uint32_t shifted_visibly_initialized_value =
6727 enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << (status_lsb_position % kBitsPerByte);
Vladimir Markodc682aa2018-01-04 18:42:57 +00006728
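  // The check below is a single unsigned byte compare against the shifted kVisiblyInitialized
  // value: assuming ClassStatus values are ordered with kVisiblyInitialized highest, any lower
  // (not yet visibly initialized) status makes the kBelow branch take the slow path.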
Vladimir Markobf121912019-06-04 13:49:05 +01006729 __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_visibly_initialized_value));
Vladimir Marko2c64a832018-01-04 11:31:56 +00006730 __ j(kBelow, slow_path->GetEntryLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006731 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006732}
6733
Vladimir Marko175e7862018-03-27 09:03:13 +00006734void InstructionCodeGeneratorX86::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
6735 Register temp) {
6736 uint32_t path_to_root = check->GetBitstringPathToRoot();
6737 uint32_t mask = check->GetBitstringMask();
6738 DCHECK(IsPowerOfTwo(mask + 1));
6739 size_t mask_bits = WhichPowerOf2(mask + 1);
6740
6741 if (mask_bits == 16u) {
6742 // Compare the bitstring in memory.
6743 __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
6744 } else {
6745 // /* uint32_t */ temp = temp->status_
6746 __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
6747 // Compare the bitstring bits using SUB.
6748 __ subl(temp, Immediate(path_to_root));
6749 // Shift out bits that do not contribute to the comparison.
6750 __ shll(temp, Immediate(32u - mask_bits));
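    // Assuming path_to_root fits within `mask`, (temp - path_to_root) << (32u - mask_bits) is
    // zero exactly when the low mask_bits bits of the status word equal path_to_root, so the
    // caller can branch on the zero/equal flag just as in the 16-bit case above.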
6751 }
6752}
6753
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006754HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
6755 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006756 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006757 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006758 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00006759 case HLoadString::LoadKind::kBssEntry:
Vladimir Marko764d4542017-05-16 10:31:41 +01006760 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006761 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006762 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006763 case HLoadString::LoadKind::kJitTableAddress:
6764 DCHECK(Runtime::Current()->UseJitCompilation());
6765 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006766 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006767 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006768 }
6769 return desired_string_load_kind;
6770}
6771
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006772void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006773 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006774 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006775 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006776 if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006777 load_kind == HLoadString::LoadKind::kBootImageRelRo ||
Vladimir Markoaad75c62016-10-03 08:46:48 +00006778 load_kind == HLoadString::LoadKind::kBssEntry) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006779 locations->SetInAt(0, Location::RequiresRegister());
6780 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006781 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006782 locations->SetOut(Location::RegisterLocation(EAX));
6783 } else {
6784 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006785 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6786 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00006787 // Rely on the pResolveString to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006788 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006789 } else {
6790 // For non-Baker read barrier we have a temp-clobbering call.
6791 }
6792 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006793 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006794}
6795
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006796Label* CodeGeneratorX86::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006797 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006798 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006799 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006800 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006801 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006802 PatchInfo<Label>* info = &jit_string_patches_.back();
6803 return &info->label;
6804}
6805
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006806// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6807// move.
6808void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01006809 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006810 Location out_loc = locations->Out();
6811 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006812
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006813 switch (load->GetLoadKind()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006814 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko44ca0752019-07-29 10:18:25 +01006815 DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
6816 codegen_->GetCompilerOptions().IsBootImageExtension());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006817 Register method_address = locations->InAt(0).AsRegister<Register>();
6818 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006819 codegen_->RecordBootImageStringPatch(load);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006820 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006821 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006822 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006823 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6824 Register method_address = locations->InAt(0).AsRegister<Register>();
6825 __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006826 codegen_->RecordBootImageRelRoPatch(load->InputAt(0)->AsX86ComputeBaseMethodAddress(),
6827 codegen_->GetBootImageOffset(load));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006828 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006829 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00006830 case HLoadString::LoadKind::kBssEntry: {
6831 Register method_address = locations->InAt(0).AsRegister<Register>();
6832 Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6833 Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006834 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006835 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01006836 // No need for memory fence, thanks to the x86 memory model.
Vladimir Marko174b2e22017-10-12 13:34:49 +01006837 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00006838 codegen_->AddSlowPath(slow_path);
6839 __ testl(out, out);
6840 __ j(kEqual, slow_path->GetEntryLabel());
6841 __ Bind(slow_path->GetExitLabel());
6842 return;
6843 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006844 case HLoadString::LoadKind::kJitBootImageAddress: {
6845 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
6846 DCHECK_NE(address, 0u);
6847 __ movl(out, Immediate(address));
6848 return;
6849 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006850 case HLoadString::LoadKind::kJitTableAddress: {
6851 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6852 Label* fixup_label = codegen_->NewJitRootStringPatch(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006853 load->GetDexFile(), load->GetStringIndex(), load->GetString());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006854 // /* GcRoot<mirror::String> */ out = *address
6855 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
6856 return;
6857 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006858 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006859 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006860 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006861
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006862  // TODO: Re-add the compiler code to do the string dex cache lookup.
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006863 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006864 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006865 __ movl(calling_convention.GetRegisterAt(0), Immediate(load->GetStringIndex().index_));
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006866 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6867 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006868}
6869
David Brazdilcb1c0552015-08-04 16:22:25 +01006870static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006871 return Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01006872}
6873
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006874void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
6875 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006876 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006877 locations->SetOut(Location::RequiresRegister());
6878}
6879
6880void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01006881 __ fs()->movl(load->GetLocations()->Out().AsRegister<Register>(), GetExceptionTlsAddress());
6882}
6883
6884void LocationsBuilderX86::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006885 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01006886}
6887
6888void InstructionCodeGeneratorX86::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
6889 __ fs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006890}
6891
6892void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006893 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6894 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006895 InvokeRuntimeCallingConvention calling_convention;
6896 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6897}
6898
6899void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006900 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006901 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006902}
6903
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006904// Temp is used for read barrier.
6905static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6906 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006907 !kUseBakerReadBarrier &&
6908 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain7c1559a2015-12-15 10:55:36 +00006909 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006910 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6911 return 1;
6912 }
6913 return 0;
6914}
6915
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006916// Interface case has 2 temps: one for holding the number of interfaces, one for the current
 6917 // interface pointer; the current interface is compared in memory.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006918// The other checks have one temp for loading the object's class.
6919static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006920 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006921 return 2;
6922 }
6923 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006924}
6925
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006926void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006927 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006928 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01006929 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006930 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006931 case TypeCheckKind::kExactCheck:
6932 case TypeCheckKind::kAbstractClassCheck:
6933 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00006934 case TypeCheckKind::kArrayObjectCheck: {
6935 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
6936 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
6937 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006938 break;
Vladimir Marko87584542017-12-12 17:47:52 +00006939 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006940 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006941 case TypeCheckKind::kUnresolvedCheck:
6942 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006943 call_kind = LocationSummary::kCallOnSlowPath;
6944 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006945 case TypeCheckKind::kBitstringCheck:
6946 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006947 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006948
Vladimir Markoca6fff82017-10-03 14:49:14 +01006949 LocationSummary* locations =
6950 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01006951 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006952 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006953 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006954 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00006955 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
6956 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
6957 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
6958 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
6959 } else {
6960 locations->SetInAt(1, Location::Any());
6961 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006962 // Note that TypeCheckSlowPathX86 uses this "out" register too.
6963 locations->SetOut(Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006964 // When read barriers are enabled, we need a temporary register for some cases.
6965 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006966}
6967
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006968void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006969 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006970 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006971 Location obj_loc = locations->InAt(0);
6972 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006973 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006974 Location out_loc = locations->Out();
6975 Register out = out_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006976 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6977 DCHECK_LE(num_temps, 1u);
6978 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006979 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006980 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6981 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6982 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07006983 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006984 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006985
6986 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006987 // Avoid null check if we know obj is not null.
6988 if (instruction->MustDoNullCheck()) {
6989 __ testl(obj, obj);
6990 __ j(kEqual, &zero);
6991 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006992
Roland Levillain7c1559a2015-12-15 10:55:36 +00006993 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006994 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006995 ReadBarrierOption read_barrier_option =
6996 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006997 // /* HeapReference<Class> */ out = obj->klass_
6998 GenerateReferenceLoadTwoRegisters(instruction,
6999 out_loc,
7000 obj_loc,
7001 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007002 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007003 if (cls.IsRegister()) {
7004 __ cmpl(out, cls.AsRegister<Register>());
7005 } else {
7006 DCHECK(cls.IsStackSlot()) << cls;
7007 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7008 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007009
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007010 // Classes must be equal for the instanceof to succeed.
7011 __ j(kNotEqual, &zero);
7012 __ movl(out, Immediate(1));
7013 __ jmp(&done);
7014 break;
7015 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007016
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007017 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007018 ReadBarrierOption read_barrier_option =
7019 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007020 // /* HeapReference<Class> */ out = obj->klass_
7021 GenerateReferenceLoadTwoRegisters(instruction,
7022 out_loc,
7023 obj_loc,
7024 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007025 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007026 // If the class is abstract, we eagerly fetch the super class of the
7027 // object to avoid doing a comparison we know will fail.
7028 NearLabel loop;
7029 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007030 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007031 GenerateReferenceLoadOneRegister(instruction,
7032 out_loc,
7033 super_offset,
7034 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007035 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007036 __ testl(out, out);
7037 // If `out` is null, we use it for the result, and jump to `done`.
7038 __ j(kEqual, &done);
7039 if (cls.IsRegister()) {
7040 __ cmpl(out, cls.AsRegister<Register>());
7041 } else {
7042 DCHECK(cls.IsStackSlot()) << cls;
7043 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7044 }
7045 __ j(kNotEqual, &loop);
7046 __ movl(out, Immediate(1));
7047 if (zero.IsLinked()) {
7048 __ jmp(&done);
7049 }
7050 break;
7051 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007052
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007053 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007054 ReadBarrierOption read_barrier_option =
7055 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007056 // /* HeapReference<Class> */ out = obj->klass_
7057 GenerateReferenceLoadTwoRegisters(instruction,
7058 out_loc,
7059 obj_loc,
7060 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007061 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007062 // Walk over the class hierarchy to find a match.
7063 NearLabel loop, success;
7064 __ Bind(&loop);
7065 if (cls.IsRegister()) {
7066 __ cmpl(out, cls.AsRegister<Register>());
7067 } else {
7068 DCHECK(cls.IsStackSlot()) << cls;
7069 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7070 }
7071 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007072 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007073 GenerateReferenceLoadOneRegister(instruction,
7074 out_loc,
7075 super_offset,
7076 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007077 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007078 __ testl(out, out);
7079 __ j(kNotEqual, &loop);
7080 // If `out` is null, we use it for the result, and jump to `done`.
7081 __ jmp(&done);
7082 __ Bind(&success);
7083 __ movl(out, Immediate(1));
7084 if (zero.IsLinked()) {
7085 __ jmp(&done);
7086 }
7087 break;
7088 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007089
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007090 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007091 ReadBarrierOption read_barrier_option =
7092 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007093 // /* HeapReference<Class> */ out = obj->klass_
7094 GenerateReferenceLoadTwoRegisters(instruction,
7095 out_loc,
7096 obj_loc,
7097 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007098 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007099 // Do an exact check.
7100 NearLabel exact_check;
7101 if (cls.IsRegister()) {
7102 __ cmpl(out, cls.AsRegister<Register>());
7103 } else {
7104 DCHECK(cls.IsStackSlot()) << cls;
7105 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7106 }
7107 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007108 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007109 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007110 GenerateReferenceLoadOneRegister(instruction,
7111 out_loc,
7112 component_offset,
7113 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007114 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007115 __ testl(out, out);
7116 // If `out` is null, we use it for the result, and jump to `done`.
7117 __ j(kEqual, &done);
7118 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
7119 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007120 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007121 __ movl(out, Immediate(1));
7122 __ jmp(&done);
7123 break;
7124 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007125
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007126 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007127 // No read barrier since the slow path will retry upon failure.
7128 // /* HeapReference<Class> */ out = obj->klass_
7129 GenerateReferenceLoadTwoRegisters(instruction,
7130 out_loc,
7131 obj_loc,
7132 class_offset,
7133 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007134 if (cls.IsRegister()) {
7135 __ cmpl(out, cls.AsRegister<Register>());
7136 } else {
7137 DCHECK(cls.IsStackSlot()) << cls;
7138 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7139 }
7140 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007141 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007142 instruction, /* is_fatal= */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007143 codegen_->AddSlowPath(slow_path);
7144 __ j(kNotEqual, slow_path->GetEntryLabel());
7145 __ movl(out, Immediate(1));
7146 if (zero.IsLinked()) {
7147 __ jmp(&done);
7148 }
7149 break;
7150 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007151
Calin Juravle98893e12015-10-02 21:05:03 +01007152 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00007153 case TypeCheckKind::kInterfaceCheck: {
7154 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007155 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00007156 // cases.
7157 //
7158 // We cannot directly call the InstanceofNonTrivial runtime
7159 // entry point without resorting to a type checking slow path
7160 // here (i.e. by calling InvokeRuntime directly), as it would
 7161 // require assigning fixed registers for the inputs of this
7162 // HInstanceOf instruction (following the runtime calling
7163 // convention), which might be cluttered by the potential first
7164 // read barrier emission at the beginning of this method.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007165 //
7166 // TODO: Introduce a new runtime entry point taking the object
7167 // to test (instead of its class) as argument, and let it deal
7168 // with the read barrier issues. This will let us refactor this
7169 // case of the `switch` code as it was previously (with a direct
7170 // call to the runtime not using a type checking slow path).
7171 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007172 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007173 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007174 instruction, /* is_fatal= */ false);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007175 codegen_->AddSlowPath(slow_path);
7176 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007177 if (zero.IsLinked()) {
7178 __ jmp(&done);
7179 }
7180 break;
7181 }
Vladimir Marko175e7862018-03-27 09:03:13 +00007182
7183 case TypeCheckKind::kBitstringCheck: {
 7184 // /* HeapReference<Class> */ out = obj->klass_
7185 GenerateReferenceLoadTwoRegisters(instruction,
7186 out_loc,
7187 obj_loc,
7188 class_offset,
7189 kWithoutReadBarrier);
7190
7191 GenerateBitstringTypeCheckCompare(instruction, out);
7192 __ j(kNotEqual, &zero);
7193 __ movl(out, Immediate(1));
7194 __ jmp(&done);
7195 break;
7196 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007197 }
7198
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007199 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007200 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007201 __ xorl(out, out);
7202 }
7203
7204 if (done.IsLinked()) {
7205 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007206 }
7207
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007208 if (slow_path != nullptr) {
7209 __ Bind(slow_path->GetExitLabel());
7210 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007211}
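// Illustrative sketch (assumed equivalent logic, not the emitted code): the class-hierarchy
// case above lowers roughly the following walk, with the exact-check and abstract-class cases
// being simpler variants of it:
//
//   mirror::Class* k = obj->GetClass();
//   while (k != nullptr && k != cls) {
//     k = k->GetSuperClass();
//   }
//   out = (k != nullptr) ? 1 : 0;  // k == cls on success.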
7212
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007213void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007214 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00007215 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01007216 LocationSummary* locations =
7217 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007218 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007219 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
7220 // Require a register for the interface check since there is a loop that compares the class to
7221 // a memory address.
7222 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00007223 } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
7224 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
7225 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
7226 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007227 } else {
7228 locations->SetInAt(1, Location::Any());
7229 }
Vladimir Marko9f8d3122018-04-06 13:47:59 +01007230 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007231 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
7232}
7233
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007234void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007235 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007236 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00007237 Location obj_loc = locations->InAt(0);
7238 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007239 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007240 Location temp_loc = locations->GetTemp(0);
7241 Register temp = temp_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007242 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
7243 DCHECK_GE(num_temps, 1u);
7244 DCHECK_LE(num_temps, 2u);
7245 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
7246 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
7247 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7248 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7249 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
7250 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
7251 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
7252 const uint32_t object_array_data_offset =
7253 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007254
Vladimir Marko87584542017-12-12 17:47:52 +00007255 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007256 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007257 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
7258 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007259 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007260
Roland Levillain0d5a2812015-11-13 10:07:31 +00007261 NearLabel done;
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007262 // Avoid null check if we know obj is not null.
7263 if (instruction->MustDoNullCheck()) {
7264 __ testl(obj, obj);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007265 __ j(kEqual, &done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007266 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007267
Roland Levillain0d5a2812015-11-13 10:07:31 +00007268 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007269 case TypeCheckKind::kExactCheck:
7270 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007271 // /* HeapReference<Class> */ temp = obj->klass_
7272 GenerateReferenceLoadTwoRegisters(instruction,
7273 temp_loc,
7274 obj_loc,
7275 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007276 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007277
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007278 if (cls.IsRegister()) {
7279 __ cmpl(temp, cls.AsRegister<Register>());
7280 } else {
7281 DCHECK(cls.IsStackSlot()) << cls;
7282 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7283 }
7284 // Jump to slow path for throwing the exception or doing a
7285 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007286 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007287 break;
7288 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007289
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007290 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007291 // /* HeapReference<Class> */ temp = obj->klass_
7292 GenerateReferenceLoadTwoRegisters(instruction,
7293 temp_loc,
7294 obj_loc,
7295 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007296 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007297
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007298 // If the class is abstract, we eagerly fetch the super class of the
7299 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007300 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007301 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007302 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007303 GenerateReferenceLoadOneRegister(instruction,
7304 temp_loc,
7305 super_offset,
7306 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007307 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007308
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007309 // If the class reference currently in `temp` is null, jump to the slow path to throw the
7310 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007311 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007312 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00007313
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007314 // Otherwise, compare the classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007315 if (cls.IsRegister()) {
7316 __ cmpl(temp, cls.AsRegister<Register>());
7317 } else {
7318 DCHECK(cls.IsStackSlot()) << cls;
7319 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7320 }
7321 __ j(kNotEqual, &loop);
7322 break;
7323 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007324
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007325 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007326 // /* HeapReference<Class> */ temp = obj->klass_
7327 GenerateReferenceLoadTwoRegisters(instruction,
7328 temp_loc,
7329 obj_loc,
7330 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007331 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007332
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007333 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007334 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007335 __ Bind(&loop);
7336 if (cls.IsRegister()) {
7337 __ cmpl(temp, cls.AsRegister<Register>());
7338 } else {
7339 DCHECK(cls.IsStackSlot()) << cls;
7340 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7341 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007342 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007343
Roland Levillain0d5a2812015-11-13 10:07:31 +00007344 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007345 GenerateReferenceLoadOneRegister(instruction,
7346 temp_loc,
7347 super_offset,
7348 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007349 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007350
7351 // If the class reference currently in `temp` is not null, jump
 7352 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007353 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007354 __ j(kNotZero, &loop);
 7355 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007356 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007357 break;
7358 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007359
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007360 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007361 // /* HeapReference<Class> */ temp = obj->klass_
7362 GenerateReferenceLoadTwoRegisters(instruction,
7363 temp_loc,
7364 obj_loc,
7365 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007366 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007367
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007368 // Do an exact check.
7369 if (cls.IsRegister()) {
7370 __ cmpl(temp, cls.AsRegister<Register>());
7371 } else {
7372 DCHECK(cls.IsStackSlot()) << cls;
7373 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7374 }
7375 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007376
7377 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007378 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007379 GenerateReferenceLoadOneRegister(instruction,
7380 temp_loc,
7381 component_offset,
7382 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007383 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007384
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007385 // If the component type is null (i.e. the object is not an array), jump to the slow path to
7386 // throw the exception. Otherwise proceed with the check.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007387 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007388 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00007389
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007390 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007391 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007392 break;
7393 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007394
Calin Juravle98893e12015-10-02 21:05:03 +01007395 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007396 // We always go into the type check slow path for the unresolved check case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007397 // We cannot directly call the CheckCast runtime entry point
7398 // without resorting to a type checking slow path here (i.e. by
 7399 // calling InvokeRuntime directly), as it would require
 7400 // assigning fixed registers for the inputs of this HCheckCast
7401 // instruction (following the runtime calling convention), which
7402 // might be cluttered by the potential first read barrier
7403 // emission at the beginning of this method.
7404 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007405 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007406
7407 case TypeCheckKind::kInterfaceCheck: {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00007408 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
 7409 // We cannot get false positives by doing this.
7410 // /* HeapReference<Class> */ temp = obj->klass_
7411 GenerateReferenceLoadTwoRegisters(instruction,
7412 temp_loc,
7413 obj_loc,
7414 class_offset,
7415 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007416
Vladimir Markoe619f6c2017-12-12 16:00:01 +00007417 // /* HeapReference<Class> */ temp = temp->iftable_
7418 GenerateReferenceLoadTwoRegisters(instruction,
7419 temp_loc,
7420 temp_loc,
7421 iftable_offset,
7422 kWithoutReadBarrier);
7423 // Iftable is never null.
7424 __ movl(maybe_temp2_loc.AsRegister<Register>(), Address(temp, array_length_offset));
7425 // Maybe poison the `cls` for direct comparison with memory.
7426 __ MaybePoisonHeapReference(cls.AsRegister<Register>());
7427 // Loop through the iftable and check if any class matches.
7428 NearLabel start_loop;
7429 __ Bind(&start_loop);
7430 // Need to subtract first to handle the empty array case.
7431 __ subl(maybe_temp2_loc.AsRegister<Register>(), Immediate(2));
7432 __ j(kNegative, type_check_slow_path->GetEntryLabel());
7433 // Go to next interface if the classes do not match.
7434 __ cmpl(cls.AsRegister<Register>(),
7435 CodeGeneratorX86::ArrayAddress(temp,
7436 maybe_temp2_loc,
7437 TIMES_4,
7438 object_array_data_offset));
7439 __ j(kNotEqual, &start_loop);
7440 // If `cls` was poisoned above, unpoison it.
7441 __ MaybeUnpoisonHeapReference(cls.AsRegister<Register>());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007442 break;
7443 }
Vladimir Marko175e7862018-03-27 09:03:13 +00007444
7445 case TypeCheckKind::kBitstringCheck: {
7446 // /* HeapReference<Class> */ temp = obj->klass_
7447 GenerateReferenceLoadTwoRegisters(instruction,
7448 temp_loc,
7449 obj_loc,
7450 class_offset,
7451 kWithoutReadBarrier);
7452
7453 GenerateBitstringTypeCheckCompare(instruction, temp);
7454 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
7455 break;
7456 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007457 }
7458 __ Bind(&done);
7459
Roland Levillain0d5a2812015-11-13 10:07:31 +00007460 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007461}
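// Illustrative sketch (assumed field names, not the emitted code): the kInterfaceCheck fast
// path above scans the class's IfTable, an object array holding (interface class, method
// array) pairs, which is why the index steps by 2:
//
//   ObjectArray* iftable = obj->klass_->iftable_;
//   for (int32_t i = iftable->length_ - 2; i >= 0; i -= 2) {
//     if (iftable->data_[i] == cls) goto done;  // Cast succeeds.
//   }
//   goto type_check_slow_path;  // Throws or re-checks with read barriers.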
7462
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007463void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007464 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
7465 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007466 InvokeRuntimeCallingConvention calling_convention;
7467 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7468}
7469
7470void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01007471 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject
7472 : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01007473 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01007474 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00007475 if (instruction->IsEnter()) {
7476 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7477 } else {
7478 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7479 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007480}
7481
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05307482void LocationsBuilderX86::VisitX86AndNot(HX86AndNot* instruction) {
7483 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7484 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
7485 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7486 locations->SetInAt(0, Location::RequiresRegister());
7487 locations->SetInAt(1, Location::RequiresRegister());
7488 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7489}
7490
7491void InstructionCodeGeneratorX86::VisitX86AndNot(HX86AndNot* instruction) {
7492 LocationSummary* locations = instruction->GetLocations();
7493 Location first = locations->InAt(0);
7494 Location second = locations->InAt(1);
7495 Location dest = locations->Out();
7496 if (instruction->GetResultType() == DataType::Type::kInt32) {
7497 __ andn(dest.AsRegister<Register>(),
7498 first.AsRegister<Register>(),
7499 second.AsRegister<Register>());
7500 } else {
7501 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
7502 __ andn(dest.AsRegisterPairLow<Register>(),
7503 first.AsRegisterPairLow<Register>(),
7504 second.AsRegisterPairLow<Register>());
7505 __ andn(dest.AsRegisterPairHigh<Register>(),
7506 first.AsRegisterPairHigh<Register>(),
7507 second.AsRegisterPairHigh<Register>());
7508 }
7509}
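// Note: BMI ANDN computes dest = ~src1 & src2 (assuming the assembler keeps the hardware
// operand order), so the 64-bit case above applies the and-not to the low and high 32-bit
// halves independently.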
7510
7511void LocationsBuilderX86::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
7512 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7513 DCHECK(instruction->GetType() == DataType::Type::kInt32) << instruction->GetType();
7514 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7515 locations->SetInAt(0, Location::RequiresRegister());
7516 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7517}
7518
7519void InstructionCodeGeneratorX86::VisitX86MaskOrResetLeastSetBit(
7520 HX86MaskOrResetLeastSetBit* instruction) {
7521 LocationSummary* locations = instruction->GetLocations();
7522 Location src = locations->InAt(0);
7523 Location dest = locations->Out();
7524 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
7525 switch (instruction->GetOpKind()) {
7526 case HInstruction::kAnd:
7527 __ blsr(dest.AsRegister<Register>(), src.AsRegister<Register>());
7528 break;
7529 case HInstruction::kXor:
7530 __ blsmsk(dest.AsRegister<Register>(), src.AsRegister<Register>());
7531 break;
7532 default:
7533 LOG(FATAL) << "Unreachable";
7534 }
7535}
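// Note: these BMI instructions implement the usual lowest-set-bit tricks directly:
//   blsr   dest, src  =>  dest = src & (src - 1)  // Reset the lowest set bit.
//   blsmsk dest, src  =>  dest = src ^ (src - 1)  // Mask up to and including the lowest set bit.
// This is why HInstruction::kAnd maps to blsr and HInstruction::kXor maps to blsmsk above.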
7536
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007537void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
7538void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
7539void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
7540
7541void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
7542 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007543 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007544 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
7545 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007546 locations->SetInAt(0, Location::RequiresRegister());
7547 locations->SetInAt(1, Location::Any());
7548 locations->SetOut(Location::SameAsFirstInput());
7549}
7550
7551void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
7552 HandleBitwiseOperation(instruction);
7553}
7554
7555void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
7556 HandleBitwiseOperation(instruction);
7557}
7558
7559void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
7560 HandleBitwiseOperation(instruction);
7561}
7562
7563void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
7564 LocationSummary* locations = instruction->GetLocations();
7565 Location first = locations->InAt(0);
7566 Location second = locations->InAt(1);
7567 DCHECK(first.Equals(locations->Out()));
7568
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007569 if (instruction->GetResultType() == DataType::Type::kInt32) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007570 if (second.IsRegister()) {
7571 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007572 __ andl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007573 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007574 __ orl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007575 } else {
7576 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00007577 __ xorl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007578 }
7579 } else if (second.IsConstant()) {
7580 if (instruction->IsAnd()) {
Roland Levillain199f3362014-11-27 17:15:16 +00007581 __ andl(first.AsRegister<Register>(),
7582 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007583 } else if (instruction->IsOr()) {
Roland Levillain199f3362014-11-27 17:15:16 +00007584 __ orl(first.AsRegister<Register>(),
7585 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007586 } else {
7587 DCHECK(instruction->IsXor());
Roland Levillain199f3362014-11-27 17:15:16 +00007588 __ xorl(first.AsRegister<Register>(),
7589 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007590 }
7591 } else {
7592 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007593 __ andl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007594 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007595 __ orl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007596 } else {
7597 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00007598 __ xorl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007599 }
7600 }
7601 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007602 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007603 if (second.IsRegisterPair()) {
7604 if (instruction->IsAnd()) {
7605 __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7606 __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7607 } else if (instruction->IsOr()) {
7608 __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7609 __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7610 } else {
7611 DCHECK(instruction->IsXor());
7612 __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7613 __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7614 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007615 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007616 if (instruction->IsAnd()) {
7617 __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7618 __ andl(first.AsRegisterPairHigh<Register>(),
7619 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7620 } else if (instruction->IsOr()) {
7621 __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7622 __ orl(first.AsRegisterPairHigh<Register>(),
7623 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7624 } else {
7625 DCHECK(instruction->IsXor());
7626 __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7627 __ xorl(first.AsRegisterPairHigh<Register>(),
7628 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7629 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007630 } else {
7631 DCHECK(second.IsConstant()) << second;
7632 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007633 int32_t low_value = Low32Bits(value);
7634 int32_t high_value = High32Bits(value);
7635 Immediate low(low_value);
7636 Immediate high(high_value);
7637 Register first_low = first.AsRegisterPairLow<Register>();
7638 Register first_high = first.AsRegisterPairHigh<Register>();
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007639 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007640 if (low_value == 0) {
7641 __ xorl(first_low, first_low);
7642 } else if (low_value != -1) {
7643 __ andl(first_low, low);
7644 }
7645 if (high_value == 0) {
7646 __ xorl(first_high, first_high);
7647 } else if (high_value != -1) {
7648 __ andl(first_high, high);
7649 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007650 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007651 if (low_value != 0) {
7652 __ orl(first_low, low);
7653 }
7654 if (high_value != 0) {
7655 __ orl(first_high, high);
7656 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007657 } else {
7658 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007659 if (low_value != 0) {
7660 __ xorl(first_low, low);
7661 }
7662 if (high_value != 0) {
7663 __ xorl(first_high, high);
7664 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007665 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007666 }
7667 }
7668}
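// For 64-bit constant operands the two halves are folded independently above; e.g. an And with
// 0xFFFFFFFF00000000 emits only "xorl first_low, first_low" (the high half is -1 and needs no
// instruction), and an Or with 0 emits no instruction at all.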
7669
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007670void InstructionCodeGeneratorX86::GenerateReferenceLoadOneRegister(
7671 HInstruction* instruction,
7672 Location out,
7673 uint32_t offset,
7674 Location maybe_temp,
7675 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007676 Register out_reg = out.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007677 if (read_barrier_option == kWithReadBarrier) {
7678 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007679 if (kUseBakerReadBarrier) {
7680 // Load with fast path based Baker's read barrier.
7681 // /* HeapReference<Object> */ out = *(out + offset)
7682 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007683 instruction, out, out_reg, offset, /* needs_null_check= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007684 } else {
7685 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007686 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain7c1559a2015-12-15 10:55:36 +00007687 // in the following move operation, as we will need it for the
7688 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00007689 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007690 __ movl(maybe_temp.AsRegister<Register>(), out_reg);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007691 // /* HeapReference<Object> */ out = *(out + offset)
7692 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007693 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007694 }
7695 } else {
7696 // Plain load with no read barrier.
7697 // /* HeapReference<Object> */ out = *(out + offset)
7698 __ movl(out_reg, Address(out_reg, offset));
7699 __ MaybeUnpoisonHeapReference(out_reg);
7700 }
7701}
7702
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007703void InstructionCodeGeneratorX86::GenerateReferenceLoadTwoRegisters(
7704 HInstruction* instruction,
7705 Location out,
7706 Location obj,
7707 uint32_t offset,
7708 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007709 Register out_reg = out.AsRegister<Register>();
7710 Register obj_reg = obj.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007711 if (read_barrier_option == kWithReadBarrier) {
7712 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007713 if (kUseBakerReadBarrier) {
7714 // Load with fast path based Baker's read barrier.
7715 // /* HeapReference<Object> */ out = *(obj + offset)
7716 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007717 instruction, out, obj_reg, offset, /* needs_null_check= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007718 } else {
7719 // Load with slow path based read barrier.
7720 // /* HeapReference<Object> */ out = *(obj + offset)
7721 __ movl(out_reg, Address(obj_reg, offset));
7722 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
7723 }
7724 } else {
7725 // Plain load with no read barrier.
7726 // /* HeapReference<Object> */ out = *(obj + offset)
7727 __ movl(out_reg, Address(obj_reg, offset));
7728 __ MaybeUnpoisonHeapReference(out_reg);
7729 }
7730}
7731
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007732void InstructionCodeGeneratorX86::GenerateGcRootFieldLoad(
7733 HInstruction* instruction,
7734 Location root,
7735 const Address& address,
7736 Label* fixup_label,
7737 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007738 Register root_reg = root.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007739 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007740 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007741 if (kUseBakerReadBarrier) {
7742 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
 7743 // Baker's read barriers are used:
7744 //
Roland Levillaind966ce72017-02-09 16:20:14 +00007745 // root = obj.field;
7746 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
7747 // if (temp != null) {
7748 // root = temp(root)
Roland Levillain7c1559a2015-12-15 10:55:36 +00007749 // }
7750
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007751 // /* GcRoot<mirror::Object> */ root = *address
7752 __ movl(root_reg, address);
7753 if (fixup_label != nullptr) {
7754 __ Bind(fixup_label);
7755 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007756 static_assert(
7757 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
7758 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
7759 "have different sizes.");
7760 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
7761 "art::mirror::CompressedReference<mirror::Object> and int32_t "
7762 "have different sizes.");
7763
Vladimir Marko953437b2016-08-24 08:30:46 +00007764 // Slow path marking the GC root `root`.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007765 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007766 instruction, root, /* unpoison_ref_before_marking= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007767 codegen_->AddSlowPath(slow_path);
7768
Roland Levillaind966ce72017-02-09 16:20:14 +00007769 // Test the entrypoint (`Thread::Current()->pReadBarrierMarkReg ## root.reg()`).
7770 const int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +01007771 Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(root.reg());
Roland Levillaind966ce72017-02-09 16:20:14 +00007772 __ fs()->cmpl(Address::Absolute(entry_point_offset), Immediate(0));
7773 // The entrypoint is null when the GC is not marking.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007774 __ j(kNotEqual, slow_path->GetEntryLabel());
7775 __ Bind(slow_path->GetExitLabel());
7776 } else {
7777 // GC root loaded through a slow path for read barriers other
7778 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007779 // /* GcRoot<mirror::Object>* */ root = address
7780 __ leal(root_reg, address);
7781 if (fixup_label != nullptr) {
7782 __ Bind(fixup_label);
7783 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007784 // /* mirror::Object* */ root = root->Read()
7785 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
7786 }
7787 } else {
7788 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007789 // /* GcRoot<mirror::Object> */ root = *address
7790 __ movl(root_reg, address);
7791 if (fixup_label != nullptr) {
7792 __ Bind(fixup_label);
7793 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007794 // Note that GC roots are not affected by heap poisoning, thus we
7795 // do not have to unpoison `root_reg` here.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007796 }
7797}
7798
7799void CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7800 Location ref,
7801 Register obj,
7802 uint32_t offset,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007803 bool needs_null_check) {
7804 DCHECK(kEmitCompilerReadBarrier);
7805 DCHECK(kUseBakerReadBarrier);
7806
7807 // /* HeapReference<Object> */ ref = *(obj + offset)
7808 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007809 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007810}
7811
7812void CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7813 Location ref,
7814 Register obj,
7815 uint32_t data_offset,
7816 Location index,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007817 bool needs_null_check) {
7818 DCHECK(kEmitCompilerReadBarrier);
7819 DCHECK(kUseBakerReadBarrier);
7820
Roland Levillain3d312422016-06-23 13:53:42 +01007821 static_assert(
7822 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7823 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00007824 // /* HeapReference<Object> */ ref =
7825 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007826 Address src = CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007827 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007828}
7829
7830void CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7831 Location ref,
7832 Register obj,
7833 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007834 bool needs_null_check,
7835 bool always_update_field,
7836 Register* temp) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007837 DCHECK(kEmitCompilerReadBarrier);
7838 DCHECK(kUseBakerReadBarrier);
7839
7840 // In slow path based read barriers, the read barrier call is
7841 // inserted after the original load. However, in fast path based
7842 // Baker's read barriers, we need to perform the load of
7843 // mirror::Object::monitor_ *before* the original reference load.
7844 // This load-load ordering is required by the read barrier.
7845 // The fast path/slow path (for Baker's algorithm) should look like:
7846 //
7847 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7848 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7849 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007850 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007851 // if (is_gray) {
7852 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7853 // }
7854 //
7855 // Note: the original implementation in ReadBarrier::Barrier is
7856 // slightly more complex as:
7857 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007858 // the high bits of rb_state, which are expected to be all zeroes
7859 // (we use CodeGeneratorX86::GenerateMemoryBarrier instead here,
7860 // which is a no-op thanks to the x86 memory model);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007861 // - it performs additional checks that we do not do here for
7862 // performance reasons.
7863
7864 Register ref_reg = ref.AsRegister<Register>();
Roland Levillain7c1559a2015-12-15 10:55:36 +00007865 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7866
Vladimir Marko953437b2016-08-24 08:30:46 +00007867 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01007868 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007869 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007870 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7871 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7872 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
7873
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007874 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007875 // ref = ReadBarrier::Mark(ref);
7876 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7877 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain7c1559a2015-12-15 10:55:36 +00007878 if (needs_null_check) {
7879 MaybeRecordImplicitNullCheck(instruction);
7880 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007881
7882 // Load fence to prevent load-load reordering.
7883 // Note that this is a no-op, thanks to the x86 memory model.
7884 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7885
7886 // The actual reference load.
7887 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007888 __ movl(ref_reg, src); // Flags are unaffected.
7889
 7890 // Note: Reference unpoisoning modifies the flags, so we need to delay it until after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp != nullptr);
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86(
        instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86(
        instruction, ref, /* unpoison_ref_before_marking= */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above; now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorX86::GenerateReadBarrierSlow(HInstruction* instruction,
                                               Location out,
                                               Location ref,
                                               Location obj,
                                               uint32_t offset,
                                               Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* slow_path = new (GetScopedAllocator())
      ReadBarrierForHeapReferenceSlowPathX86(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorX86::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                    Location out,
                                                    Location ref,
                                                    Location obj,
                                                    uint32_t offset,
                                                    Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    __ UnpoisonHeapReference(out.AsRegister<Register>());
  }
}

void CodeGeneratorX86::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCode* slow_path =
      new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86(instruction, out, root);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

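// Illustrative sketch of GenPackedSwitchWithCompares below, for a hypothetical switch
// with lower_bound == 10 and three consecutive cases (10, 11, 12):
//
//     cmpl $10, value ; jl default ; je case_10   // first compare may go to default
//     cmpl $12, value ; jl case_11 ; je case_12   // one compare resolves two cases
//     jmp default
//
// The second compare is against case 12: once value > 10 is known, value < 12 can
// only mean value == 11, so each cmpl after the first handles a pair of cases.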
void InstructionCodeGeneratorX86::GenPackedSwitchWithCompares(Register value_reg,
                                                              int32_t lower_bound,
                                                              uint32_t num_entries,
                                                              HBasicBlock* switch_block,
                                                              HBasicBlock* default_block) {
  // Figure out the correct compare values and jump conditions.
  // Handle the first compare/branch as a special case because it might
  // jump to the default case.
  DCHECK_GT(num_entries, 2u);
  Condition first_condition;
  uint32_t index;
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  if (lower_bound != 0) {
    first_condition = kLess;
    __ cmpl(value_reg, Immediate(lower_bound));
    __ j(first_condition, codegen_->GetLabelOf(default_block));
    __ j(kEqual, codegen_->GetLabelOf(successors[0]));

    index = 1;
  } else {
    // Handle all the compare/jumps below.
    first_condition = kBelow;
    index = 0;
  }

  // Handle the rest of the compare/jumps.
  for (; index + 1 < num_entries; index += 2) {
    int32_t compare_to_value = lower_bound + index + 1;
    __ cmpl(value_reg, Immediate(compare_to_value));
    // Jump to successors[index] if value < case_value[index + 1].
    __ j(first_condition, codegen_->GetLabelOf(successors[index]));
    // Jump to successors[index + 1] if value == case_value[index + 1].
    __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
  }

  if (index != num_entries) {
    // There is an odd number of entries. Handle the last one.
    DCHECK_EQ(index + 1, num_entries);
    __ cmpl(value_reg, Immediate(lower_bound + index));
    __ j(kEqual, codegen_->GetLabelOf(successors[index]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ jmp(codegen_->GetLabelOf(default_block));
  }
}

void InstructionCodeGeneratorX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  Register value_reg = locations->InAt(0).AsRegister<Register>();

  GenPackedSwitchWithCompares(value_reg,
                              lower_bound,
                              num_entries,
                              switch_instr->GetBlock(),
                              switch_instr->GetDefaultBlock());
}

void LocationsBuilderX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  // Constant area pointer.
  locations->SetInAt(1, Location::RequiresRegister());

  // And the temporary we need.
  locations->AddTemp(Location::RequiresRegister());
}

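// For switches above kPackedSwitchJumpTableThreshold, VisitX86PackedSwitch below
// dispatches through a jump table in the constant area instead. Roughly:
//
//     leal -lower_bound(value), temp              // remove the bias (if any)
//     cmpl $(num_entries - 1), temp ; ja default  // range check
//     movl table(base, temp, 4), temp             // entry = target - base
//     addl base, temp
//     jmp *temp
//
// where `base` is the register produced by HX86ComputeBaseMethodAddress (named
// `constant_area` in the code below).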
void InstructionCodeGeneratorX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  Register value_reg = locations->InAt(0).AsRegister<Register>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    GenPackedSwitchWithCompares(value_reg,
                                lower_bound,
                                num_entries,
                                switch_instr->GetBlock(),
                                default_block);
    return;
  }

  // Otherwise, dispatch through a jump table in the constant area.
  Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
  Register constant_area = locations->InAt(1).AsRegister<Register>();

  // Remove the bias, if needed.
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg, -lower_bound));
    value_reg = temp_reg;
  }

  // Is the value in range?
  DCHECK_GE(num_entries, 1u);
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load (target-constant_area) from the jump table, indexing by the value.
  __ movl(temp_reg, codegen_->LiteralCaseTable(switch_instr, constant_area, value_reg));

  // Compute the actual target address by adding in constant_area.
  __ addl(temp_reg, constant_area);

  // And jump.
  __ jmp(temp_reg);
}

void LocationsBuilderX86::VisitX86ComputeBaseMethodAddress(
    HX86ComputeBaseMethodAddress* insn) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(insn, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

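// x86-32 has no PC-relative data addressing, so the base address is materialized
// with the usual call/pop trick; the two instructions emitted below amount to
//
//     call next    // pushes the address of `next`
//   next:
//     popl reg     // reg now holds that address
//
// The code offset recorded here is what RIPFixup later uses to turn constant-area
// offsets into displacements from `reg`.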
void InstructionCodeGeneratorX86::VisitX86ComputeBaseMethodAddress(
    HX86ComputeBaseMethodAddress* insn) {
  LocationSummary* locations = insn->GetLocations();
  Register reg = locations->Out().AsRegister<Register>();

  // Generate call to next instruction.
  Label next_instruction;
  __ call(&next_instruction);
  __ Bind(&next_instruction);

  // Remember this offset for later use with constant area.
  codegen_->AddMethodAddressOffset(insn, GetAssembler()->CodeSize());

  // Grab the return address off the stack.
  __ popl(reg);
}

void LocationsBuilderX86::VisitX86LoadFromConstantTable(
    HX86LoadFromConstantTable* insn) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(insn, LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::ConstantLocation(insn->GetConstant()));

  // If the constant doesn't need to be materialized, we only need the inputs to be set.
  if (insn->IsEmittedAtUseSite()) {
    return;
  }

  switch (insn->GetType()) {
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetOut(Location::RequiresFpuRegister());
      break;

    case DataType::Type::kInt32:
      locations->SetOut(Location::RequiresRegister());
      break;

    default:
      LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
  }
}

void InstructionCodeGeneratorX86::VisitX86LoadFromConstantTable(HX86LoadFromConstantTable* insn) {
  if (insn->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = insn->GetLocations();
  Location out = locations->Out();
  Register const_area = locations->InAt(0).AsRegister<Register>();
  HConstant* value = insn->GetConstant();

  switch (insn->GetType()) {
    case DataType::Type::kFloat32:
      __ movss(out.AsFpuRegister<XmmRegister>(),
               codegen_->LiteralFloatAddress(
                   value->AsFloatConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
      break;

    case DataType::Type::kFloat64:
      __ movsd(out.AsFpuRegister<XmmRegister>(),
               codegen_->LiteralDoubleAddress(
                   value->AsDoubleConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
      break;

    case DataType::Type::kInt32:
      __ movl(out.AsRegister<Register>(),
              codegen_->LiteralInt32Address(
                  value->AsIntConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
      break;

    default:
      LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
  }
}

/**
 * Class to handle late fixup of offsets into constant area.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86& codegen,
           HX86ComputeBaseMethodAddress* base_method_address,
           size_t offset)
      : codegen_(&codegen),
        base_method_address_(base_method_address),
        offset_into_constant_area_(offset) {}

 protected:
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86* codegen_;
  HX86ComputeBaseMethodAddress* base_method_address_;

 private:
  void Process(const MemoryRegion& region, int pos) override {
    // Patch the correct offset for the instruction. The place to patch is the
    // last 4 bytes of the instruction.
    // The value to patch is the distance of the offset in the constant area
    // from the address computed by the HX86ComputeBaseMethodAddress instruction.
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position =
        constant_offset - codegen_->GetMethodAddressOffset(base_method_address_);

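    // Worked example with hypothetical offsets: if the base address was recorded at
    // code offset 20, the constant area starts at offset 200 and this constant lives
    // 8 bytes into it, then relative_position = (200 + 8) - 20 = 188; at run time the
    // base register points at offset 20, so base + 188 addresses the constant.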
    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  int32_t offset_into_constant_area_;
};

/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86& codegen, HX86PackedSwitch* switch_instr)
      : RIPFixup(codegen, switch_instr->GetBaseMethodAddress(), static_cast<size_t>(-1)),
        switch_instr_(switch_instr) {}

  void CreateJumpTable() {
    X86Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // The label values in the jump table are computed relative to the
    // instruction addressing the constant area.
    const int32_t relative_offset = codegen_->GetMethodAddressOffset(base_method_address_);

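    // Hypothetical example: with the base address recorded at code offset 20 and a
    // target block bound at code offset 320, the entry appended below is 300; the
    // dispatch code adds the runtime base register to it to get the absolute target.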
    // Populate the jump table with the correct values.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // Each entry is the offset of the target block relative to the method base address.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - relative_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HX86PackedSwitch* switch_instr_;
};

void CodeGeneratorX86::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86Assembler* assembler = GetAssembler();

  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8
    // byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables.
    for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}

Address CodeGeneratorX86::LiteralDoubleAddress(double v,
                                               HX86ComputeBaseMethodAddress* method_base,
                                               Register reg) {
  AssemblerFixup* fixup =
      new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddDouble(v));
  return Address(reg, kDummy32BitOffset, fixup);
}

Address CodeGeneratorX86::LiteralFloatAddress(float v,
                                              HX86ComputeBaseMethodAddress* method_base,
                                              Register reg) {
  AssemblerFixup* fixup =
      new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddFloat(v));
  return Address(reg, kDummy32BitOffset, fixup);
}

Address CodeGeneratorX86::LiteralInt32Address(int32_t v,
                                              HX86ComputeBaseMethodAddress* method_base,
                                              Register reg) {
  AssemblerFixup* fixup =
      new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddInt32(v));
  return Address(reg, kDummy32BitOffset, fixup);
}

Address CodeGeneratorX86::LiteralInt64Address(int64_t v,
                                              HX86ComputeBaseMethodAddress* method_base,
                                              Register reg) {
  AssemblerFixup* fixup =
      new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddInt64(v));
  return Address(reg, kDummy32BitOffset, fixup);
}

void CodeGeneratorX86::Load32BitValue(Register dest, int32_t value) {
  if (value == 0) {
    __ xorl(dest, dest);
  } else {
    __ movl(dest, Immediate(value));
  }
}

void CodeGeneratorX86::Compare32BitValue(Register dest, int32_t value) {
  if (value == 0) {
    __ testl(dest, dest);
  } else {
    __ cmpl(dest, Immediate(value));
  }
}

void CodeGeneratorX86::GenerateIntCompare(Location lhs, Location rhs) {
  Register lhs_reg = lhs.AsRegister<Register>();
  GenerateIntCompare(lhs_reg, rhs);
}

void CodeGeneratorX86::GenerateIntCompare(Register lhs, Location rhs) {
  if (rhs.IsConstant()) {
    int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
    Compare32BitValue(lhs, value);
  } else if (rhs.IsStackSlot()) {
    __ cmpl(lhs, Address(ESP, rhs.GetStackIndex()));
  } else {
    __ cmpl(lhs, rhs.AsRegister<Register>());
  }
}

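// ArrayAddress below folds a constant index straight into the displacement; e.g.
// (hypothetical values) index 3 with scale TIMES_4 and data_offset 12 yields
// Address(obj, 3 * 4 + 12) = Address(obj, 24). A non-constant index instead uses a
// [obj + index * scale + data_offset] SIB address.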
Address CodeGeneratorX86::ArrayAddress(Register obj,
                                       Location index,
                                       ScaleFactor scale,
                                       uint32_t data_offset) {
  return index.IsConstant() ?
      Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
      Address(obj, index.AsRegister<Register>(), scale, data_offset);
}

Address CodeGeneratorX86::LiteralCaseTable(HX86PackedSwitch* switch_instr,
                                           Register reg,
                                           Register value) {
  // Create a fixup to be used to create and address the jump table.
  JumpTableRIPFixup* table_fixup =
      new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);

  // We have to populate the jump tables.
  fixups_to_jump_tables_.push_back(table_fixup);

  // We want a scaled address, as we are extracting the correct offset from the table.
  return Address(reg, value, TIMES_4, kDummy32BitOffset, table_fixup);
}

// TODO: target as memory.
void CodeGeneratorX86::MoveFromReturnRegister(Location target, DataType::Type type) {
  if (!target.IsValid()) {
    DCHECK_EQ(type, DataType::Type::kVoid);
    return;
  }

  DCHECK_NE(type, DataType::Type::kVoid);

  Location return_loc = InvokeDexCallingConventionVisitorX86().GetReturnLocation(type);
  if (target.Equals(return_loc)) {
    return;
  }

  // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
  // with the else branch.
  if (type == DataType::Type::kInt64) {
    HParallelMove parallel_move(GetGraph()->GetAllocator());
    parallel_move.AddMove(return_loc.ToLow(), target.ToLow(), DataType::Type::kInt32, nullptr);
    parallel_move.AddMove(return_loc.ToHigh(), target.ToHigh(), DataType::Type::kInt32, nullptr);
    GetMoveResolver()->EmitNativeCode(&parallel_move);
  } else {
    // Let the parallel move resolver take care of all of this.
    HParallelMove parallel_move(GetGraph()->GetAllocator());
    parallel_move.AddMove(return_loc, target, type, nullptr);
    GetMoveResolver()->EmitNativeCode(&parallel_move);
  }
}

void CodeGeneratorX86::PatchJitRootUse(uint8_t* code,
                                       const uint8_t* roots_data,
                                       const PatchInfo<Label>& info,
                                       uint64_t index_in_table) const {
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
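  // `code_offset` (the label position minus a small adjustment) points at the 32-bit
  // literal to patch. Code bytes carry no alignment guarantee, which is why the store
  // below goes through an explicitly unaligned uint32_t type.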
  using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}

void CodeGeneratorX86::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const PatchInfo<Label>& info : jit_string_patches_) {
    StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }

  for (const PatchInfo<Label>& info : jit_class_patches_) {
    TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }
}

void LocationsBuilderX86::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                   ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                           ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

bool LocationsBuilderX86::CpuHasAvxFeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX();
}
bool LocationsBuilderX86::CpuHasAvx2FeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX2();
}
bool InstructionCodeGeneratorX86::CpuHasAvxFeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX();
}
bool InstructionCodeGeneratorX86::CpuHasAvx2FeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX2();
}

#undef __

}  // namespace x86
}  // namespace art