/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86.h"

#include "art_method.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_x86.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/managed_register_x86.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = EAX;
static constexpr Register kCoreCalleeSaves[] = { EBP, ESI, EDI };

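// Note (added): bit C2 (0x400) of the x87 FPU status word as captured by fnstsw;
// fprem keeps this bit set while the partial remainder still needs another
// reduction iteration.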
static constexpr int kC2ConditionMask = 0x400;

static constexpr int kFakeReturnRegister = Register(8);

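// Note (added): these are the canonical quiet NaN bit patterns for double and
// float (all exponent bits set, most significant fraction bit set).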
static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);

static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
  // that the kPrimNot result register is the same as the first argument register.
  return caller_saves;
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, x).Int32Value()

class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "NullCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};

class DivZeroCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "DivZeroCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
};

class DivRemMinusOneSlowPathX86 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86(HInstruction* instruction, Register reg, bool is_div)
      : SlowPathCode(instruction), reg_(reg), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    __ Bind(GetEntryLabel());
    if (is_div_) {
      __ negl(reg_);
    } else {
      __ movl(reg_, Immediate(0));
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86"; }

 private:
  Register reg_;
  bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86);
};

class BoundsCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86(HBoundsCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }

    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<Register>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<Register>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        __ shrl(length_loc.AsRegister<Register>(), Immediate(1));
      }
    }
    x86_codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};

class SuspendCheckSlowPathX86 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves full width XMM for SIMD.
    x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores full width XMM for SIMD.
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathX86"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};

class LoadStringSlowPathX86 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));
    x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    RestoreLiveRegisters(codegen, locations);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadStringSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
};

class LoadClassSlowPathX86 : public SlowPathCode {
 public:
  LoadClassSlowPathX86(HLoadClass* cls, HInstruction* at)
      : SlowPathCode(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ movl(calling_convention.GetRegisterAt(0), Immediate(type_index.index_));
      x86_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), source);
    }
    if (must_do_clinit) {
      x86_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
    }
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathX86"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
};

class TypeCheckSlowPathX86 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<Register>());
    }

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(locations->InAt(0),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   DataType::Type::kReference,
                                   locations->InAt(1),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                                   DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_codegen->InvokeRuntime(kQuickCheckInstanceOf,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
      }
      RestoreLiveRegisters(codegen, locations);

      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathX86"; }
  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};

class DeoptimizationSlowPathX86 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->Load32BitValue(
        calling_convention.GetRegisterAt(0),
        static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    x86_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const override { return "DeoptimizationSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86);
};

class ArraySetSlowPathX86 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ArraySetSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86(HInstruction* instruction,
                             Location ref,
                             bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
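    // Note (added): for example, if `ref` lives in EDI (register 7), the offset
    // computed below selects the pReadBarrierMarkReg07 entrypoint, which marks
    // the reference in place in EDI.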
    int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathX86(HInstruction* instruction,
                                           Location ref,
                                           Register obj,
                                           const Address& field_addr,
                                           bool unpoison_ref_before_marking,
                                           Register temp)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkAndUpdateFieldSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp_, ref_reg);
    __ j(kEqual, &done);
    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save EAX beforehand, and move the
    // expected value (stored in `temp_`) into EAX.
    __ pushl(EAX);
    __ movl(EAX, temp_);

    // Convenience aliases.
    Register base = obj_;
    Register expected = EAX;
    Register value = ref_reg;

    bool base_equals_value = (base == value);
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value` to a temporary register. This way, poisoning
        // `value` won't invalidate `base`.
        value = temp_;
        __ movl(value, base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (EAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value, expected);
      DCHECK_NE(base, expected);

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(value);
    }

    __ LockCmpxchgl(field_addr_, value);

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(value);
      }
      // No need to unpoison `expected` (EAX), as it is overwritten below.
    }

    // Restore EAX.
    __ popl(EAX);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86(HInstruction* instruction,
                                         Location out,
                                         Location ref,
                                         Location obj,
                                         uint32_t offset,
                                         Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86::X86Assembler::shll and
          // art::x86::X86Assembler::AddImmediate below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ movl(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(index_reg, Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(index_reg, Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(calling_convention.GetRegisterAt(2), Immediate(offset_));
    }
    x86_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForHeapReferenceSlowPathX86"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(GetAssembler())->  // NOLINT

inline Condition X86Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition and FP condition to x86 name.
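// Note (added): comiss/ucomiss report an FP comparison through CF/ZF/PF the way
// an unsigned integer compare sets CF/ZF, so the below/above family of
// conditions is the one to branch on after an FP compare.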
inline Condition X86UnsignedOrFPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    // Signed to unsigned, and FP to x86 name.
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    // Unsigned remain unchanged.
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << XmmRegister(reg);
}

const X86InstructionSetFeatures& CodeGeneratorX86::GetInstructionSetFeatures() const {
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86InstructionSetFeatures();
}

size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
  return kX86WordSize;
}

size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
  return kX86WordSize;
}

size_t CodeGeneratorX86::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(Address(ESP, stack_index), XmmRegister(reg_id));
  } else {
    __ movsd(Address(ESP, stack_index), XmmRegister(reg_id));
  }
  return GetFloatingPointSpillSlotSize();
}

size_t CodeGeneratorX86::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(XmmRegister(reg_id), Address(ESP, stack_index));
  } else {
    __ movsd(XmmRegister(reg_id), Address(ESP, stack_index));
  }
  return GetFloatingPointSpillSlotSize();
}

Calin Juravle175dc732015-08-25 15:42:32 +01001002void CodeGeneratorX86::InvokeRuntime(QuickEntrypointEnum entrypoint,
1003 HInstruction* instruction,
1004 uint32_t dex_pc,
1005 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001006 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001007 GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(entrypoint).Int32Value());
1008 if (EntrypointRequiresStackMap(entrypoint)) {
1009 RecordPcInfo(instruction, dex_pc, slow_path);
1010 }
Alexandre Rames8158f282015-08-07 10:26:17 +01001011}
1012
Roland Levillaindec8f632016-07-22 17:10:06 +01001013void CodeGeneratorX86::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1014 HInstruction* instruction,
1015 SlowPathCode* slow_path) {
1016 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001017 GenerateInvokeRuntime(entry_point_offset);
1018}
1019
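// On x86 the current Thread* is reachable through the fs segment register, so a
// runtime entry point is invoked with an fs-relative absolute call through its
// offset in the Thread object.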
1020void CodeGeneratorX86::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001021 __ fs()->call(Address::Absolute(entry_point_offset));
1022}
1023
Mark Mendellfb8d2792015-03-31 22:16:59 -04001024CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001025 const CompilerOptions& compiler_options,
1026 OptimizingCompilerStats* stats)
Mark Mendell5f874182015-03-04 15:42:45 -05001027 : CodeGenerator(graph,
1028 kNumberOfCpuRegisters,
1029 kNumberOfXmmRegisters,
1030 kNumberOfRegisterPairs,
1031 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1032 arraysize(kCoreCalleeSaves))
1033 | (1 << kFakeReturnRegister),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001034 0,
1035 compiler_options,
1036 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001037 block_labels_(nullptr),
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001038 location_builder_(graph, this),
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001039 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001040 move_resolver_(graph->GetAllocator(), this),
1041 assembler_(graph->GetAllocator()),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001042 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1043 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1044 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1045 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001046 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001047 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko6fd16062018-06-26 11:02:04 +01001048 boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001049 jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1050 jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko93205e32016-04-13 11:59:46 +01001051 constant_area_start_(-1),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001052 fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001053 method_address_offset_(std::less<uint32_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001054 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001055 // Use a fake return address register to mimic Quick.
1056 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001057}
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001058
David Brazdil58282f42016-01-14 12:45:10 +00001059void CodeGeneratorX86::SetupBlockedRegisters() const {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001060 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001061 blocked_core_registers_[ESP] = true;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001062}
1063
Nicolas Geoffray4a34a422014-04-03 10:38:37 +01001064InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001065 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray4a34a422014-04-03 10:38:37 +01001066 assembler_(codegen->GetAssembler()),
1067 codegen_(codegen) {}
1068
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001069static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001070 return dwarf::Reg::X86Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001071}
1072
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001073void CodeGeneratorX86::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001074 __ cfi().SetCurrentCFAOffset(kX86WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001075 __ Bind(&frame_entry_label_);
Roland Levillain199f3362014-11-27 17:15:16 +00001076 bool skip_overflow_check =
1077 IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001078 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Calin Juravle93edf732015-01-20 20:14:07 +00001079
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001080 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
1081 __ addw(Address(kMethodRegisterArgument, ArtMethod::HotnessCountOffset().Int32Value()),
1082 Immediate(1));
1083 }
1084
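  // Implicit stack-overflow check: read a word `reserved_bytes` below ESP. If the
  // stack cannot grow that far the load faults, and the fault handler turns the
  // fault into a StackOverflowError using the pc recorded just below.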
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001085 if (!skip_overflow_check) {
Vladimir Marko33bff252017-11-01 14:35:42 +00001086 size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86);
1087 __ testl(EAX, Address(ESP, -static_cast<int32_t>(reserved_bytes)));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001088 RecordPcInfo(nullptr, 0);
Nicolas Geoffray397f2e42014-07-23 12:57:19 +01001089 }
1090
Mark Mendell5f874182015-03-04 15:42:45 -05001091 if (HasEmptyFrame()) {
1092 return;
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001093 }
Mark Mendell5f874182015-03-04 15:42:45 -05001094
1095 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
1096 Register reg = kCoreCalleeSaves[i];
1097 if (allocated_registers_.ContainsCoreRegister(reg)) {
1098 __ pushl(reg);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001099 __ cfi().AdjustCFAOffset(kX86WordSize);
1100 __ cfi().RelOffset(DWARFReg(reg), 0);
Mark Mendell5f874182015-03-04 15:42:45 -05001101 }
1102 }
1103
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001104 int adjust = GetFrameSize() - FrameEntrySpillSize();
1105 __ subl(ESP, Immediate(adjust));
1106 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001107 // Save the current method if we need it. Note that we do not
1108 // do this in HCurrentMethod, as the instruction might have been removed
1109 // in the SSA graph.
1110 if (RequiresCurrentMethod()) {
1111 __ movl(Address(ESP, kCurrentMethodStackOffset), kMethodRegisterArgument);
1112 }
Nicolas Geoffrayf7893532017-06-15 12:34:36 +01001113
1114 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1115 // Initialize should_deoptimize flag to 0.
1116 __ movl(Address(ESP, GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
1117 }
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001118}
1119
1120void CodeGeneratorX86::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001121 __ cfi().RememberState();
1122 if (!HasEmptyFrame()) {
1123 int adjust = GetFrameSize() - FrameEntrySpillSize();
1124 __ addl(ESP, Immediate(adjust));
1125 __ cfi().AdjustCFAOffset(-adjust);
Mark Mendell5f874182015-03-04 15:42:45 -05001126
David Srbeckyc34dc932015-04-12 09:27:43 +01001127 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1128 Register reg = kCoreCalleeSaves[i];
1129 if (allocated_registers_.ContainsCoreRegister(reg)) {
1130 __ popl(reg);
1131 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86WordSize));
1132 __ cfi().Restore(DWARFReg(reg));
1133 }
Mark Mendell5f874182015-03-04 15:42:45 -05001134 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001135 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001136 __ ret();
1137 __ cfi().RestoreState();
1138 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001139}
1140
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001141void CodeGeneratorX86::Bind(HBasicBlock* block) {
1142 __ Bind(GetLabelOf(block));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001143}
1144
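// Return values follow the standard x86 managed convention: reference and 32-bit
// (or narrower) core values come back in EAX, 64-bit core values in the EAX:EDX
// pair, and floating-point values in XMM0.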
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001145Location InvokeDexCallingConventionVisitorX86::GetReturnLocation(DataType::Type type) const {
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001146 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001147 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001148 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001149 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001150 case DataType::Type::kInt8:
1151 case DataType::Type::kUint16:
1152 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -08001153 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001154 case DataType::Type::kInt32:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001155 return Location::RegisterLocation(EAX);
1156
Aart Bik66c158e2018-01-31 12:55:04 -08001157 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001158 case DataType::Type::kInt64:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001159 return Location::RegisterPairLocation(EAX, EDX);
1160
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001161 case DataType::Type::kVoid:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001162 return Location::NoLocation();
1163
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001164 case DataType::Type::kFloat64:
1165 case DataType::Type::kFloat32:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001166 return Location::FpuRegisterLocation(XMM0);
1167 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01001168
1169 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001170}
1171
1172Location InvokeDexCallingConventionVisitorX86::GetMethodLocation() const {
1173 return Location::RegisterLocation(kMethodRegisterArgument);
1174}
1175
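// Assigns the next argument either a register or a caller-reserved stack slot.
// Note that stack_index_ advances for every argument, including those passed in
// registers, so the stack offset used once registers run out reflects the
// argument's position in the full argument list.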
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001176Location InvokeDexCallingConventionVisitorX86::GetNextLocation(DataType::Type type) {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001177 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001178 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001179 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001180 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001181 case DataType::Type::kInt8:
1182 case DataType::Type::kUint16:
1183 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001184 case DataType::Type::kInt32: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001185 uint32_t index = gp_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001186 stack_index_++;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001187 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001188 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001189 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001190 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001191 }
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001192 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001193
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001194 case DataType::Type::kInt64: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001195 uint32_t index = gp_index_;
1196 gp_index_ += 2;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001197 stack_index_ += 2;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001198 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001199 X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
1200 calling_convention.GetRegisterPairAt(index));
1201 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001202 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001203 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
1204 }
1205 }
1206
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001207 case DataType::Type::kFloat32: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001208 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001209 stack_index_++;
1210 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1211 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1212 } else {
1213 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
1214 }
1215 }
1216
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001217 case DataType::Type::kFloat64: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001218 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001219 stack_index_ += 2;
1220 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1221 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1222 } else {
1223 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001224 }
1225 }
1226
Aart Bik66c158e2018-01-31 12:55:04 -08001227 case DataType::Type::kUint32:
1228 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001229 case DataType::Type::kVoid:
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001230 LOG(FATAL) << "Unexpected parameter type " << type;
Elliott Hughesc1896c92018-11-29 11:33:18 -08001231 UNREACHABLE();
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001232 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00001233 return Location::NoLocation();
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001234}
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001235
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001236void CodeGeneratorX86::Move32(Location destination, Location source) {
1237 if (source.Equals(destination)) {
1238 return;
1239 }
1240 if (destination.IsRegister()) {
1241 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001242 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001243 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001244 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001245 } else {
1246 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001247 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001248 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001249 } else if (destination.IsFpuRegister()) {
1250 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001251 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001252 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001253 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001254 } else {
1255 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001256 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001257 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001258 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001259 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001260 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001261 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001262 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001263 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05001264 } else if (source.IsConstant()) {
1265 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001266 int32_t value = GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05001267 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001268 } else {
1269 DCHECK(source.IsStackSlot());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001270 __ pushl(Address(ESP, source.GetStackIndex()));
1271 __ popl(Address(ESP, destination.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001272 }
1273 }
1274}
1275
1276void CodeGeneratorX86::Move64(Location destination, Location source) {
1277 if (source.Equals(destination)) {
1278 return;
1279 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001280 if (destination.IsRegisterPair()) {
1281 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001282 EmitParallelMoves(
1283 Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
1284 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001285 DataType::Type::kInt32,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001286 Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001287 Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001288 DataType::Type::kInt32);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001289 } else if (source.IsFpuRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001290 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
1291 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
1292 __ psrlq(src_reg, Immediate(32));
1293 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001294 } else {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001295 // No conflict possible, so just do the moves.
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001296 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001297 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
1298 __ movl(destination.AsRegisterPairHigh<Register>(),
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001299 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
1300 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001301 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05001302 if (source.IsFpuRegister()) {
1303 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
1304 } else if (source.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001305 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001306 } else if (source.IsRegisterPair()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001307 size_t elem_size = DataType::Size(DataType::Type::kInt32);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001308 // Create stack space for 2 elements.
1309 __ subl(ESP, Immediate(2 * elem_size));
1310 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
1311 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
1312 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
1313 // And remove the temporary stack space we allocated.
1314 __ addl(ESP, Immediate(2 * elem_size));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001315 } else {
1316 LOG(FATAL) << "Unimplemented";
1317 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001318 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001319 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001320 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001321 // No conflict possible, so just do the moves.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001322 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001323 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001324 source.AsRegisterPairHigh<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001325 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001326 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001327 } else if (source.IsConstant()) {
1328 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001329 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1330 int64_t value = GetInt64ValueOf(constant);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001331 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(Low32Bits(value)));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001332 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
1333 Immediate(High32Bits(value)));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001334 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001335 DCHECK(source.IsDoubleStackSlot()) << source;
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001336 EmitParallelMoves(
1337 Location::StackSlot(source.GetStackIndex()),
1338 Location::StackSlot(destination.GetStackIndex()),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001339 DataType::Type::kInt32,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001340 Location::StackSlot(source.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001341 Location::StackSlot(destination.GetHighStackIndex(kX86WordSize)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001342 DataType::Type::kInt32);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001343 }
1344 }
1345}
1346
Calin Juravle175dc732015-08-25 15:42:32 +01001347void CodeGeneratorX86::MoveConstant(Location location, int32_t value) {
1348 DCHECK(location.IsRegister());
1349 __ movl(location.AsRegister<Register>(), Immediate(value));
1350}
1351
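// Moves between arbitrary locations via the parallel-move resolver. A 64-bit core
// move is split into two 32-bit moves so the resolver can handle each half as an
// independent entry; constant and FP sources are kept as a single move.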
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001352void CodeGeneratorX86::MoveLocation(Location dst, Location src, DataType::Type dst_type) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001353 HParallelMove move(GetGraph()->GetAllocator());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001354 if (dst_type == DataType::Type::kInt64 && !src.IsConstant() && !src.IsFpuRegister()) {
1355 move.AddMove(src.ToLow(), dst.ToLow(), DataType::Type::kInt32, nullptr);
1356 move.AddMove(src.ToHigh(), dst.ToHigh(), DataType::Type::kInt32, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001357 } else {
David Brazdil74eb1b22015-12-14 11:44:01 +00001358 move.AddMove(src, dst, dst_type, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001359 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001360 GetMoveResolver()->EmitNativeCode(&move);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001361}
1362
1363void CodeGeneratorX86::AddLocationAsTemp(Location location, LocationSummary* locations) {
1364 if (location.IsRegister()) {
1365 locations->AddTemp(location);
1366 } else if (location.IsRegisterPair()) {
1367 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1368 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
1369 } else {
1370 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1371 }
1372}
1373
David Brazdilfc6a86a2015-06-26 10:33:45 +00001374void InstructionCodeGeneratorX86::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08001375 if (successor->IsExitBlock()) {
1376 DCHECK(got->GetPrevious()->AlwaysThrows());
1377 return; // no code needed
1378 }
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001379
1380 HBasicBlock* block = got->GetBlock();
1381 HInstruction* previous = got->GetPrevious();
1382
1383 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001384 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001385 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
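      // EAX may hold a live value, so spill it, reload the ArtMethod* from the
      // current-method stack slot (displaced by one word because of the push),
      // bump the hotness counter, and restore EAX.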
1386 __ pushl(EAX);
1387 __ movl(EAX, Address(ESP, kX86WordSize));
1388 __ addw(Address(EAX, ArtMethod::HotnessCountOffset().Int32Value()), Immediate(1));
1389 __ popl(EAX);
1390 }
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001391 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1392 return;
1393 }
1394
1395 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1396 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1397 }
1398 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001399 __ jmp(codegen_->GetLabelOf(successor));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001400 }
1401}
1402
David Brazdilfc6a86a2015-06-26 10:33:45 +00001403void LocationsBuilderX86::VisitGoto(HGoto* got) {
1404 got->SetLocations(nullptr);
1405}
1406
1407void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
1408 HandleGoto(got, got->GetSuccessor());
1409}
1410
1411void LocationsBuilderX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1412 try_boundary->SetLocations(nullptr);
1413}
1414
1415void InstructionCodeGeneratorX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1416 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1417 if (!successor->IsExitBlock()) {
1418 HandleGoto(try_boundary, successor);
1419 }
1420}
1421
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001422void LocationsBuilderX86::VisitExit(HExit* exit) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001423 exit->SetLocations(nullptr);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001424}
1425
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001426void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001427}
1428
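// Emits the branches for a floating-point condition whose operands were already
// compared with ucomiss/ucomisd. An unordered result (either operand is NaN) sets
// ZF, PF and CF, so it has to be dispatched explicitly before testing the unsigned
// condition code chosen by X86UnsignedOrFPCondition.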
Mark Mendell152408f2015-12-31 12:28:50 -05001429template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001430void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001431 LabelType* true_label,
1432 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001433 if (cond->IsFPConditionTrueIfNaN()) {
1434 __ j(kUnordered, true_label);
1435 } else if (cond->IsFPConditionFalseIfNaN()) {
1436 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001437 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001438 __ j(X86UnsignedOrFPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001439}
1440
Mark Mendell152408f2015-12-31 12:28:50 -05001441template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001442void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001443 LabelType* true_label,
1444 LabelType* false_label) {
Mark Mendellc4701932015-04-10 13:18:51 -04001445 LocationSummary* locations = cond->GetLocations();
1446 Location left = locations->InAt(0);
1447 Location right = locations->InAt(1);
1448 IfCondition if_cond = cond->GetCondition();
1449
Mark Mendellc4701932015-04-10 13:18:51 -04001450 Register left_high = left.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001451 Register left_low = left.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001452 IfCondition true_high_cond = if_cond;
1453 IfCondition false_high_cond = cond->GetOppositeCondition();
Aart Bike9f37602015-10-09 11:15:55 -07001454  Condition final_condition = X86UnsignedOrFPCondition(if_cond);  // The low-word compare is unsigned.
Mark Mendellc4701932015-04-10 13:18:51 -04001455
1456 // Set the conditions for the test, remembering that == needs to be
1457 // decided using the low words.
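  // For example, a kCondLT test against a register-pair RHS emits roughly:
  //   cmpl left_high, right_high
  //   jl   true_label     // signed compare decides on the high words
  //   jg   false_label
  //   cmpl left_low, right_low
  //   jb   true_label     // ties fall through to an unsigned low-word compare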
1458 switch (if_cond) {
1459 case kCondEQ:
Mark Mendellc4701932015-04-10 13:18:51 -04001460 case kCondNE:
Roland Levillain4fa13f62015-07-06 18:11:54 +01001461 // Nothing to do.
Mark Mendellc4701932015-04-10 13:18:51 -04001462 break;
1463 case kCondLT:
1464 false_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001465 break;
1466 case kCondLE:
1467 true_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001468 break;
1469 case kCondGT:
1470 false_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001471 break;
1472 case kCondGE:
1473 true_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001474 break;
Aart Bike9f37602015-10-09 11:15:55 -07001475 case kCondB:
1476 false_high_cond = kCondA;
1477 break;
1478 case kCondBE:
1479 true_high_cond = kCondB;
1480 break;
1481 case kCondA:
1482 false_high_cond = kCondB;
1483 break;
1484 case kCondAE:
1485 true_high_cond = kCondA;
1486 break;
Mark Mendellc4701932015-04-10 13:18:51 -04001487 }
1488
1489 if (right.IsConstant()) {
1490 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellc4701932015-04-10 13:18:51 -04001491 int32_t val_high = High32Bits(value);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001492 int32_t val_low = Low32Bits(value);
Mark Mendellc4701932015-04-10 13:18:51 -04001493
Aart Bika19616e2016-02-01 18:57:58 -08001494 codegen_->Compare32BitValue(left_high, val_high);
Mark Mendellc4701932015-04-10 13:18:51 -04001495 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001496 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001497 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001498 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001499 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001500 __ j(X86Condition(true_high_cond), true_label);
1501 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001502 }
1503    // The high words are equal at this point, so compare the low words.
Aart Bika19616e2016-02-01 18:57:58 -08001504 codegen_->Compare32BitValue(left_low, val_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001505 } else if (right.IsRegisterPair()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001506 Register right_high = right.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001507 Register right_low = right.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001508
1509 __ cmpl(left_high, right_high);
1510 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001511 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001512 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001513 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001514 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001515 __ j(X86Condition(true_high_cond), true_label);
1516 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001517 }
1518    // The high words are equal at this point, so compare the low words.
1519 __ cmpl(left_low, right_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001520 } else {
1521 DCHECK(right.IsDoubleStackSlot());
1522 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1523 if (if_cond == kCondNE) {
1524 __ j(X86Condition(true_high_cond), true_label);
1525 } else if (if_cond == kCondEQ) {
1526 __ j(X86Condition(false_high_cond), false_label);
1527 } else {
1528 __ j(X86Condition(true_high_cond), true_label);
1529 __ j(X86Condition(false_high_cond), false_label);
1530 }
1531    // The high words are equal at this point, so compare the low words.
1532 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Mark Mendellc4701932015-04-10 13:18:51 -04001533 }
1534 // The last comparison might be unsigned.
1535 __ j(final_condition, true_label);
1536}
1537
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001538void InstructionCodeGeneratorX86::GenerateFPCompare(Location lhs,
1539 Location rhs,
1540 HInstruction* insn,
1541 bool is_double) {
1542 HX86LoadFromConstantTable* const_area = insn->InputAt(1)->AsX86LoadFromConstantTable();
1543 if (is_double) {
1544 if (rhs.IsFpuRegister()) {
1545 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1546 } else if (const_area != nullptr) {
1547 DCHECK(const_area->IsEmittedAtUseSite());
1548 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(),
1549 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001550 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
1551 const_area->GetBaseMethodAddress(),
1552 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001553 } else {
1554 DCHECK(rhs.IsDoubleStackSlot());
1555 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1556 }
1557 } else {
1558 if (rhs.IsFpuRegister()) {
1559 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1560 } else if (const_area != nullptr) {
1561 DCHECK(const_area->IsEmittedAtUseSite());
1562 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(),
1563 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001564 const_area->GetConstant()->AsFloatConstant()->GetValue(),
1565 const_area->GetBaseMethodAddress(),
1566 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001567 } else {
1568 DCHECK(rhs.IsStackSlot());
1569 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1570 }
1571 }
1572}
1573
Mark Mendell152408f2015-12-31 12:28:50 -05001574template<class LabelType>
David Brazdil0debae72015-11-12 18:37:00 +00001575void InstructionCodeGeneratorX86::GenerateCompareTestAndBranch(HCondition* condition,
Mark Mendell152408f2015-12-31 12:28:50 -05001576 LabelType* true_target_in,
1577 LabelType* false_target_in) {
David Brazdil0debae72015-11-12 18:37:00 +00001578 // Generated branching requires both targets to be explicit. If either of the
1579 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Mark Mendell152408f2015-12-31 12:28:50 -05001580 LabelType fallthrough_target;
1581 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1582 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
David Brazdil0debae72015-11-12 18:37:00 +00001583
Mark Mendellc4701932015-04-10 13:18:51 -04001584 LocationSummary* locations = condition->GetLocations();
1585 Location left = locations->InAt(0);
1586 Location right = locations->InAt(1);
1587
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001588 DataType::Type type = condition->InputAt(0)->GetType();
Mark Mendellc4701932015-04-10 13:18:51 -04001589 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001590 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04001591 GenerateLongComparesAndJumps(condition, true_target, false_target);
1592 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001593 case DataType::Type::kFloat32:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001594 GenerateFPCompare(left, right, condition, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001595 GenerateFPJumps(condition, true_target, false_target);
1596 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001597 case DataType::Type::kFloat64:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001598 GenerateFPCompare(left, right, condition, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001599 GenerateFPJumps(condition, true_target, false_target);
1600 break;
1601 default:
1602 LOG(FATAL) << "Unexpected compare type " << type;
1603 }
1604
David Brazdil0debae72015-11-12 18:37:00 +00001605 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001606 __ jmp(false_target);
1607 }
David Brazdil0debae72015-11-12 18:37:00 +00001608
1609 if (fallthrough_target.IsLinked()) {
1610 __ Bind(&fallthrough_target);
1611 }
Mark Mendellc4701932015-04-10 13:18:51 -04001612}
1613
David Brazdil0debae72015-11-12 18:37:00 +00001614static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1615 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1616 // are set only strictly before `branch`. We can't use the eflags on long/FP
1617 // conditions if they are materialized due to the complex branching.
1618 return cond->IsCondition() &&
1619 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001620 cond->InputAt(0)->GetType() != DataType::Type::kInt64 &&
1621 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001622}
1623
Mark Mendell152408f2015-12-31 12:28:50 -05001624template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001625void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001626 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001627 LabelType* true_target,
1628 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001629 HInstruction* cond = instruction->InputAt(condition_input_index);
1630
1631 if (true_target == nullptr && false_target == nullptr) {
1632 // Nothing to do. The code always falls through.
1633 return;
1634 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001635 // Constant condition, statically compared against "true" (integer value 1).
1636 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001637 if (true_target != nullptr) {
1638 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001639 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001640 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001641 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001642 if (false_target != nullptr) {
1643 __ jmp(false_target);
1644 }
1645 }
1646 return;
1647 }
1648
1649 // The following code generates these patterns:
1650 // (1) true_target == nullptr && false_target != nullptr
1651 // - opposite condition true => branch to false_target
1652 // (2) true_target != nullptr && false_target == nullptr
1653 // - condition true => branch to true_target
1654 // (3) true_target != nullptr && false_target != nullptr
1655 // - condition true => branch to true_target
1656 // - branch to false_target
1657 if (IsBooleanValueOrMaterializedCondition(cond)) {
1658 if (AreEflagsSetFrom(cond, instruction)) {
1659 if (true_target == nullptr) {
1660 __ j(X86Condition(cond->AsCondition()->GetOppositeCondition()), false_target);
1661 } else {
1662 __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
1663 }
1664 } else {
1665 // Materialized condition, compare against 0.
1666 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1667 if (lhs.IsRegister()) {
1668 __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>());
1669 } else {
1670 __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
1671 }
1672 if (true_target == nullptr) {
1673 __ j(kEqual, false_target);
1674 } else {
1675 __ j(kNotEqual, true_target);
1676 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001677 }
1678 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001679 // Condition has not been materialized, use its inputs as the comparison and
1680 // its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001681 HCondition* condition = cond->AsCondition();
David Brazdil0debae72015-11-12 18:37:00 +00001682
1683 // If this is a long or FP comparison that has been folded into
1684 // the HCondition, generate the comparison directly.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001685 DataType::Type type = condition->InputAt(0)->GetType();
1686 if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
David Brazdil0debae72015-11-12 18:37:00 +00001687 GenerateCompareTestAndBranch(condition, true_target, false_target);
1688 return;
1689 }
1690
1691 Location lhs = condition->GetLocations()->InAt(0);
1692 Location rhs = condition->GetLocations()->InAt(1);
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001693 // LHS is guaranteed to be in a register (see LocationsBuilderX86::HandleCondition).
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001694 codegen_->GenerateIntCompare(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00001695 if (true_target == nullptr) {
1696 __ j(X86Condition(condition->GetOppositeCondition()), false_target);
1697 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001698 __ j(X86Condition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001699 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001700 }
David Brazdil0debae72015-11-12 18:37:00 +00001701
1702 // If neither branch falls through (case 3), the conditional branch to `true_target`
1703 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1704 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001705 __ jmp(false_target);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001706 }
1707}
1708
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001709void LocationsBuilderX86::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001710 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00001711 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001712 locations->SetInAt(0, Location::Any());
1713 }
1714}
1715
1716void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001717 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1718 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1719 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1720 nullptr : codegen_->GetLabelOf(true_successor);
1721 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1722 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08001723 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001724}
1725
1726void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001727 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001728 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01001729 InvokeRuntimeCallingConvention calling_convention;
1730 RegisterSet caller_saves = RegisterSet::Empty();
1731 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1732 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00001733 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001734 locations->SetInAt(0, Location::Any());
1735 }
1736}
1737
1738void InstructionCodeGeneratorX86::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001739 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001740 GenerateTestAndBranch<Label>(deoptimize,
Andreas Gampe3db70682018-12-26 15:12:03 -08001741 /* condition_input_index= */ 0,
David Brazdil74eb1b22015-12-14 11:44:01 +00001742 slow_path->GetEntryLabel(),
Andreas Gampe3db70682018-12-26 15:12:03 -08001743 /* false_target= */ nullptr);
David Brazdil74eb1b22015-12-14 11:44:01 +00001744}
1745
Mingyao Yang063fc772016-08-02 11:02:54 -07001746void LocationsBuilderX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001747 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07001748 LocationSummary(flag, LocationSummary::kNoCall);
1749 locations->SetOut(Location::RequiresRegister());
1750}
1751
1752void InstructionCodeGeneratorX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1753 __ movl(flag->GetLocations()->Out().AsRegister<Register>(),
1754 Address(ESP, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
1755}
1756
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001757static bool SelectCanUseCMOV(HSelect* select) {
1758 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001759 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001760 return false;
1761 }
1762
1763  // An FP condition doesn't generate the single condition code that we need.
1764  // In 32-bit mode, a long condition doesn't generate a single condition code either.
1765 HInstruction* condition = select->GetCondition();
1766 if (condition->IsCondition()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001767 DataType::Type compare_type = condition->InputAt(0)->GetType();
1768 if (compare_type == DataType::Type::kInt64 ||
1769 DataType::IsFloatingPointType(compare_type)) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001770 return false;
1771 }
1772 }
1773
1774 // We can generate a CMOV for this Select.
1775 return true;
1776}
1777
David Brazdil74eb1b22015-12-14 11:44:01 +00001778void LocationsBuilderX86::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001779 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001780 if (DataType::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001781 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001782 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001783 } else {
1784 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001785 if (SelectCanUseCMOV(select)) {
1786 if (select->InputAt(1)->IsConstant()) {
1787 // Cmov can't handle a constant value.
1788 locations->SetInAt(1, Location::RequiresRegister());
1789 } else {
1790 locations->SetInAt(1, Location::Any());
1791 }
1792 } else {
1793 locations->SetInAt(1, Location::Any());
1794 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001795 }
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001796 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1797 locations->SetInAt(2, Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00001798 }
1799 locations->SetOut(Location::SameAsFirstInput());
1800}
1801
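// When CMOV is usable the select is lowered branch-free: the output register
// already holds the "false" value (same-as-first-input), and it is conditionally
// overwritten with the "true" value. Roughly, when the condition value has to be
// re-tested, this emits:
//   testl cond_reg, cond_reg
//   cmovne out_reg, true_reg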
1802void InstructionCodeGeneratorX86::VisitSelect(HSelect* select) {
1803 LocationSummary* locations = select->GetLocations();
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001804 DCHECK(locations->InAt(0).Equals(locations->Out()));
1805 if (SelectCanUseCMOV(select)) {
1806 // If both the condition and the source types are integer, we can generate
1807 // a CMOV to implement Select.
1808
1809 HInstruction* select_condition = select->GetCondition();
1810 Condition cond = kNotEqual;
1811
1812 // Figure out how to test the 'condition'.
1813 if (select_condition->IsCondition()) {
1814 HCondition* condition = select_condition->AsCondition();
1815 if (!condition->IsEmittedAtUseSite()) {
1816 // This was a previously materialized condition.
1817 // Can we use the existing condition code?
1818 if (AreEflagsSetFrom(condition, select)) {
1819 // Materialization was the previous instruction. Condition codes are right.
1820 cond = X86Condition(condition->GetCondition());
1821 } else {
1822 // No, we have to recreate the condition code.
1823 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1824 __ testl(cond_reg, cond_reg);
1825 }
1826 } else {
1827 // We can't handle FP or long here.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001828 DCHECK_NE(condition->InputAt(0)->GetType(), DataType::Type::kInt64);
1829 DCHECK(!DataType::IsFloatingPointType(condition->InputAt(0)->GetType()));
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001830 LocationSummary* cond_locations = condition->GetLocations();
Roland Levillain0b671c02016-08-19 12:02:34 +01001831 codegen_->GenerateIntCompare(cond_locations->InAt(0), cond_locations->InAt(1));
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001832 cond = X86Condition(condition->GetCondition());
1833 }
1834 } else {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001835 // Must be a Boolean condition, which needs to be compared to 0.
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001836 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1837 __ testl(cond_reg, cond_reg);
1838 }
1839
1840 // If the condition is true, overwrite the output, which already contains false.
1841 Location false_loc = locations->InAt(0);
1842 Location true_loc = locations->InAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001843 if (select->GetType() == DataType::Type::kInt64) {
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001844 // 64 bit conditional move.
1845 Register false_high = false_loc.AsRegisterPairHigh<Register>();
1846 Register false_low = false_loc.AsRegisterPairLow<Register>();
1847 if (true_loc.IsRegisterPair()) {
1848 __ cmovl(cond, false_high, true_loc.AsRegisterPairHigh<Register>());
1849 __ cmovl(cond, false_low, true_loc.AsRegisterPairLow<Register>());
1850 } else {
1851 __ cmovl(cond, false_high, Address(ESP, true_loc.GetHighStackIndex(kX86WordSize)));
1852 __ cmovl(cond, false_low, Address(ESP, true_loc.GetStackIndex()));
1853 }
1854 } else {
1855 // 32 bit conditional move.
1856 Register false_reg = false_loc.AsRegister<Register>();
1857 if (true_loc.IsRegister()) {
1858 __ cmovl(cond, false_reg, true_loc.AsRegister<Register>());
1859 } else {
1860 __ cmovl(cond, false_reg, Address(ESP, true_loc.GetStackIndex()));
1861 }
1862 }
1863 } else {
1864 NearLabel false_target;
1865 GenerateTestAndBranch<NearLabel>(
Andreas Gampe3db70682018-12-26 15:12:03 -08001866 select, /* condition_input_index= */ 2, /* true_target= */ nullptr, &false_target);
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001867 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1868 __ Bind(&false_target);
1869 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001870}
1871
David Srbecky0cf44932015-12-09 14:09:59 +00001872void LocationsBuilderX86::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001873 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00001874}
1875
David Srbeckyd28f4a02016-03-14 17:14:24 +00001876void InstructionCodeGeneratorX86::VisitNativeDebugInfo(HNativeDebugInfo*) {
1877 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001878}
1879
1880void CodeGeneratorX86::GenerateNop() {
1881 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001882}
1883
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001884void LocationsBuilderX86::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001885 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001886 new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001887 // Handle the long/FP comparisons made in instruction simplification.
1888 switch (cond->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001889 case DataType::Type::kInt64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001890 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell8659e842016-02-16 10:41:46 -05001891 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001892 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001893 locations->SetOut(Location::RequiresRegister());
1894 }
1895 break;
1896 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001897 case DataType::Type::kFloat32:
1898 case DataType::Type::kFloat64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001899 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001900 if (cond->InputAt(1)->IsX86LoadFromConstantTable()) {
1901 DCHECK(cond->InputAt(1)->IsEmittedAtUseSite());
1902 } else if (cond->InputAt(1)->IsConstant()) {
1903 locations->SetInAt(1, Location::RequiresFpuRegister());
1904 } else {
1905 locations->SetInAt(1, Location::Any());
1906 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001907 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001908 locations->SetOut(Location::RequiresRegister());
1909 }
1910 break;
1911 }
1912 default:
1913 locations->SetInAt(0, Location::RequiresRegister());
1914 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001915 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001916 // We need a byte register.
1917 locations->SetOut(Location::RegisterLocation(ECX));
1918 }
1919 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001920 }
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00001921}
1922
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001923void InstructionCodeGeneratorX86::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001924 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001925 return;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001926 }
Mark Mendellc4701932015-04-10 13:18:51 -04001927
1928 LocationSummary* locations = cond->GetLocations();
1929 Location lhs = locations->InAt(0);
1930 Location rhs = locations->InAt(1);
1931 Register reg = locations->Out().AsRegister<Register>();
Mark Mendell152408f2015-12-31 12:28:50 -05001932 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001933
1934 switch (cond->InputAt(0)->GetType()) {
1935 default: {
1936 // Integer case.
1937
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01001938 // Clear output register: setb only sets the low byte.
Mark Mendellc4701932015-04-10 13:18:51 -04001939 __ xorl(reg, reg);
Roland Levillain0b671c02016-08-19 12:02:34 +01001940 codegen_->GenerateIntCompare(lhs, rhs);
Aart Bike9f37602015-10-09 11:15:55 -07001941 __ setb(X86Condition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001942 return;
1943 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001944 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04001945 GenerateLongComparesAndJumps(cond, &true_label, &false_label);
1946 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001947 case DataType::Type::kFloat32:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001948 GenerateFPCompare(lhs, rhs, cond, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001949 GenerateFPJumps(cond, &true_label, &false_label);
1950 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001951 case DataType::Type::kFloat64:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001952 GenerateFPCompare(lhs, rhs, cond, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001953 GenerateFPJumps(cond, &true_label, &false_label);
1954 break;
1955 }
1956
1957 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001958 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001959
Roland Levillain4fa13f62015-07-06 18:11:54 +01001960 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001961 __ Bind(&false_label);
1962 __ xorl(reg, reg);
1963 __ jmp(&done_label);
1964
Roland Levillain4fa13f62015-07-06 18:11:54 +01001965 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001966 __ Bind(&true_label);
1967 __ movl(reg, Immediate(1));
1968 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001969}
1970
1971void LocationsBuilderX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001972 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001973}
1974
1975void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001976 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001977}
1978
1979void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001980 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001981}
1982
1983void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001984 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001985}
1986
1987void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001988 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001989}
1990
1991void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001992 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001993}
1994
1995void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001996 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001997}
1998
1999void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002000 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002001}
2002
2003void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002004 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002005}
2006
2007void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002008 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002009}
2010
2011void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002012 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07002013}
2014
2015void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002016 HandleCondition(comp);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002017}
2018
Aart Bike9f37602015-10-09 11:15:55 -07002019void LocationsBuilderX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002020 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002021}
2022
2023void InstructionCodeGeneratorX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002024 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002025}
2026
2027void LocationsBuilderX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002028 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002029}
2030
2031void InstructionCodeGeneratorX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002032 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002033}
2034
2035void LocationsBuilderX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002036 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002037}
2038
2039void InstructionCodeGeneratorX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002040 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002041}
2042
2043void LocationsBuilderX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002044 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002045}
2046
2047void InstructionCodeGeneratorX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002048 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002049}
2050
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002051void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002052 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002053 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002054 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002055}
2056
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002057void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002058 // Will be generated at use site.
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002059}
2060
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002061void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
2062 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002063 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002064 locations->SetOut(Location::ConstantLocation(constant));
2065}
2066
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002067void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002068 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002069}
2070
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002071void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002072 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002073 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002074 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002075}
2076
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002077void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002078 // Will be generated at use site.
2079}
2080
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002081void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
2082 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002083 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002084 locations->SetOut(Location::ConstantLocation(constant));
2085}
2086
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002087void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002088 // Will be generated at use site.
2089}
2090
2091void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
2092 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002093 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002094 locations->SetOut(Location::ConstantLocation(constant));
2095}
2096
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002097void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002098 // Will be generated at use site.
2099}
2100
Igor Murashkind01745e2017-04-05 16:40:31 -07002101void LocationsBuilderX86::VisitConstructorFence(HConstructorFence* constructor_fence) {
2102 constructor_fence->SetLocations(nullptr);
2103}
2104
2105void InstructionCodeGeneratorX86::VisitConstructorFence(
2106 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
2107 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
2108}
2109
Calin Juravle27df7582015-04-17 19:12:31 +01002110void LocationsBuilderX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2111 memory_barrier->SetLocations(nullptr);
2112}
2113
2114void InstructionCodeGeneratorX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002115 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002116}
2117
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002118void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002119 ret->SetLocations(nullptr);
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002120}
2121
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002122void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002123 codegen_->GenerateFrameExit();
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002124}
2125
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002126void LocationsBuilderX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002127 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002128 new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002129 switch (ret->InputAt(0)->GetType()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002130 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002131 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002132 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002133 case DataType::Type::kInt8:
2134 case DataType::Type::kUint16:
2135 case DataType::Type::kInt16:
2136 case DataType::Type::kInt32:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002137 locations->SetInAt(0, Location::RegisterLocation(EAX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002138 break;
2139
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002140 case DataType::Type::kInt64:
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002141 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002142 0, Location::RegisterPairLocation(EAX, EDX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002143 break;
2144
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002145 case DataType::Type::kFloat32:
2146 case DataType::Type::kFloat64:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002147 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002148 0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002149 break;
2150
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002151 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002152 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002153 }
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002154}
2155
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002156void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002157 if (kIsDebugBuild) {
2158 switch (ret->InputAt(0)->GetType()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002159 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002160 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002161 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002162 case DataType::Type::kInt8:
2163 case DataType::Type::kUint16:
2164 case DataType::Type::kInt16:
2165 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002166 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<Register>(), EAX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002167 break;
2168
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002169 case DataType::Type::kInt64:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002170 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
2171 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002172 break;
2173
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002174 case DataType::Type::kFloat32:
2175 case DataType::Type::kFloat64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002176 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>(), XMM0);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002177 break;
2178
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002179 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002180 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002181 }
2182 }
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002183 codegen_->GenerateFrameExit();
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002184}
2185
Calin Juravle175dc732015-08-25 15:42:32 +01002186void LocationsBuilderX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2187  // The trampoline uses the same calling convention as the dex calling convention,
2188  // except that instead of loading the first argument register with the target
2189  // Method*, that register will contain the method_idx.
2190 HandleInvoke(invoke);
2191}
2192
2193void InstructionCodeGeneratorX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2194 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2195}
2196
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002197void LocationsBuilderX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002198 // Explicit clinit checks triggered by static invokes must have been pruned by
2199 // art::PrepareForRegisterAllocation.
2200 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002201
Mark Mendellfb8d2792015-03-31 22:16:59 -04002202 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002203 if (intrinsic.TryDispatch(invoke)) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01002204 if (invoke->GetLocations()->CanCall() &&
2205 invoke->HasPcRelativeMethodLoadKind() &&
2206 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).IsInvalid()) {
Vladimir Markoc53c0792015-11-19 15:48:33 +00002207 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002208 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04002209 return;
2210 }
2211
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002212 HandleInvoke(invoke);
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002213
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002214  // For PC-relative method load kinds the invoke has an extra input, the PC-relative address base.
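  // (Context, not verified here:) 32-bit x86 has no PC-relative data addressing, so the
  // "base" is presumably a register holding the current method's address (cf. the
  // GetBaseMethodAddress uses further down in this file) against which the PC-relative
  // data is addressed.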
Vladimir Marko65979462017-05-19 17:25:12 +01002215 if (invoke->HasPcRelativeMethodLoadKind()) {
Vladimir Markob4536b72015-11-24 13:45:23 +00002216 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002217 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002218}
2219
Mark Mendell09ed1a32015-03-25 08:30:06 -04002220static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86* codegen) {
2221 if (invoke->GetLocations()->Intrinsified()) {
2222 IntrinsicCodeGeneratorX86 intrinsic(codegen);
2223 intrinsic.Dispatch(invoke);
2224 return true;
2225 }
2226 return false;
2227}
2228
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002229void InstructionCodeGeneratorX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002230 // Explicit clinit checks triggered by static invokes must have been pruned by
2231 // art::PrepareForRegisterAllocation.
2232 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002233
Mark Mendell09ed1a32015-03-25 08:30:06 -04002234 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2235 return;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002236 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002237
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002238 LocationSummary* locations = invoke->GetLocations();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002239 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002240 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002241}
2242
2243void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00002244 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
2245 if (intrinsic.TryDispatch(invoke)) {
2246 return;
2247 }
2248
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002249 HandleInvoke(invoke);
2250}
2251
2252void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002253 InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002254 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002255}
2256
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002257void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002258 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2259 return;
2260 }
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002261
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002262 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002263 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002264}
2265
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002266void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002267  // This call to HandleInvoke allocates a temporary (core) register
2268  // which is also used to transfer the hidden argument from a core
2269  // register to the FP register XMM7.
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002270 HandleInvoke(invoke);
2271 // Add the hidden argument.
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002272 invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM7));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002273}
2274
2275void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
2276 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002277 LocationSummary* locations = invoke->GetLocations();
2278 Register temp = locations->GetTemp(0).AsRegister<Register>();
2279 XmmRegister hidden_reg = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002280 Location receiver = locations->InAt(0);
2281 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2282
Roland Levillain0d5a2812015-11-13 10:07:31 +00002283  // Set the hidden argument. It is safe to do this here, as XMM7
2284 // won't be modified thereafter, before the `call` instruction.
2285 DCHECK_EQ(XMM7, hidden_reg);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002286 __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002287 __ movd(hidden_reg, temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002288
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002289 if (receiver.IsStackSlot()) {
2290 __ movl(temp, Address(ESP, receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002291 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002292 __ movl(temp, Address(temp, class_offset));
2293 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002294 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002295 __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002296 }
Roland Levillain4d027112015-07-01 15:41:14 +01002297 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002298 // Instead of simply (possibly) unpoisoning `temp` here, we should
2299 // emit a read barrier for the previous class reference load.
2300 // However this is not required in practice, as this is an
2301 // intermediate/temporary reference and because the current
2302 // concurrent copying collector keeps the from-space memory
2303  // intact/accessible until the end of the marking phase (though the
2304  // concurrent copying collector may not keep it accessible in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002305 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002306 // temp = temp->GetAddressOfIMT()
2307 __ movl(temp,
2308 Address(temp, mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002309 // temp = temp->GetImtEntryAt(method_offset);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002310 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00002311 invoke->GetImtIndex(), kX86PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002312 __ movl(temp, Address(temp, method_offset));
2313 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002314 __ call(Address(temp,
Andreas Gampe542451c2016-07-26 09:02:02 -07002315 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002316
2317 DCHECK(!codegen_->IsLeafMethod());
2318 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2319}
2320
Orion Hodsonac141392017-01-13 11:53:47 +00002321void LocationsBuilderX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2322 HandleInvoke(invoke);
2323}
2324
2325void InstructionCodeGeneratorX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2326 codegen_->GenerateInvokePolymorphicCall(invoke);
2327}
2328
Orion Hodson4c8e12e2018-05-18 08:33:20 +01002329void LocationsBuilderX86::VisitInvokeCustom(HInvokeCustom* invoke) {
2330 HandleInvoke(invoke);
2331}
2332
2333void InstructionCodeGeneratorX86::VisitInvokeCustom(HInvokeCustom* invoke) {
2334 codegen_->GenerateInvokeCustomCall(invoke);
2335}
2336
Roland Levillain88cb1752014-10-20 16:36:47 +01002337void LocationsBuilderX86::VisitNeg(HNeg* neg) {
2338 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002339 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Roland Levillain88cb1752014-10-20 16:36:47 +01002340 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002341 case DataType::Type::kInt32:
2342 case DataType::Type::kInt64:
Roland Levillain88cb1752014-10-20 16:36:47 +01002343 locations->SetInAt(0, Location::RequiresRegister());
2344 locations->SetOut(Location::SameAsFirstInput());
2345 break;
2346
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002347 case DataType::Type::kFloat32:
Roland Levillain5368c212014-11-27 15:03:41 +00002348 locations->SetInAt(0, Location::RequiresFpuRegister());
2349 locations->SetOut(Location::SameAsFirstInput());
2350 locations->AddTemp(Location::RequiresRegister());
2351 locations->AddTemp(Location::RequiresFpuRegister());
2352 break;
2353
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002354 case DataType::Type::kFloat64:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002355 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002356 locations->SetOut(Location::SameAsFirstInput());
2357 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002358 break;
2359
2360 default:
2361 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2362 }
2363}
2364
2365void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
2366 LocationSummary* locations = neg->GetLocations();
2367 Location out = locations->Out();
2368 Location in = locations->InAt(0);
2369 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002370 case DataType::Type::kInt32:
Roland Levillain88cb1752014-10-20 16:36:47 +01002371 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002372 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002373 __ negl(out.AsRegister<Register>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002374 break;
2375
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002376 case DataType::Type::kInt64:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002377 DCHECK(in.IsRegisterPair());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002378 DCHECK(in.Equals(out));
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002379 __ negl(out.AsRegisterPairLow<Register>());
2380  // Negation is similar to subtraction from zero. The least
2381  // significant 32 bits (the low register of the pair) trigger a
2382  // borrow when they are different from zero; to take it into
2383  // account, add 1 to the most significant 32 bits if the carry
2384  // flag (CF) is set to 1 after the first NEGL operation.
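      // For example, negating 0x00000001_00000000 (low register 0): NEGL leaves the
      // low register at 0 with CF = 0, ADCL adds nothing, and NEGL of the high
      // register yields 0xFFFFFFFF, i.e. the expected result 0xFFFFFFFF_00000000.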
2385 __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
2386 __ negl(out.AsRegisterPairHigh<Register>());
2387 break;
2388
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002389 case DataType::Type::kFloat32: {
Roland Levillain5368c212014-11-27 15:03:41 +00002390 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002391 Register constant = locations->GetTemp(0).AsRegister<Register>();
2392 XmmRegister mask = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002393 // Implement float negation with an exclusive or with value
2394 // 0x80000000 (mask for bit 31, representing the sign of a
2395 // single-precision floating-point number).
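      // For example, XORing 1.0f (0x3F800000) with the mask yields 0xBF800000, i.e.
      // -1.0f; it also turns +0.0f into -0.0f, which a subtraction from zero would not.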
2396 __ movl(constant, Immediate(INT32_C(0x80000000)));
2397 __ movd(mask, constant);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002398 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002399 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002400 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002401
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002402 case DataType::Type::kFloat64: {
Roland Levillain5368c212014-11-27 15:03:41 +00002403 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002404 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002405 // Implement double negation with an exclusive or with value
2406 // 0x8000000000000000 (mask for bit 63, representing the sign of
2407 // a double-precision floating-point number).
2408 __ LoadLongConstant(mask, INT64_C(0x8000000000000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002409 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002410 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002411 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002412
2413 default:
2414 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2415 }
2416}
2417
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002418void LocationsBuilderX86::VisitX86FPNeg(HX86FPNeg* neg) {
2419 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002420 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002421 DCHECK(DataType::IsFloatingPointType(neg->GetType()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002422 locations->SetInAt(0, Location::RequiresFpuRegister());
2423 locations->SetInAt(1, Location::RequiresRegister());
2424 locations->SetOut(Location::SameAsFirstInput());
2425 locations->AddTemp(Location::RequiresFpuRegister());
2426}
2427
2428void InstructionCodeGeneratorX86::VisitX86FPNeg(HX86FPNeg* neg) {
2429 LocationSummary* locations = neg->GetLocations();
2430 Location out = locations->Out();
2431 DCHECK(locations->InAt(0).Equals(out));
2432
2433 Register constant_area = locations->InAt(1).AsRegister<Register>();
2434 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002435 if (neg->GetType() == DataType::Type::kFloat32) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00002436 __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x80000000),
2437 neg->GetBaseMethodAddress(),
2438 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002439 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
2440 } else {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00002441 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000),
2442 neg->GetBaseMethodAddress(),
2443 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002444 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
2445 }
2446}
2447
Roland Levillaindff1f282014-11-05 14:15:05 +00002448void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002449 DataType::Type result_type = conversion->GetResultType();
2450 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002451 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2452 << input_type << " -> " << result_type;
Roland Levillain624279f2014-12-04 11:54:28 +00002453
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002454 // The float-to-long and double-to-long type conversions rely on a
2455 // call to the runtime.
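  // Reason, in brief (assumption): 32-bit SSE cannot produce a 64-bit integer result,
  // and the Java semantics (NaN -> 0, saturation to the int64 min/max) are implemented
  // by the kQuickF2l / kQuickD2l entrypoints invoked in the code generator below.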
Roland Levillain624279f2014-12-04 11:54:28 +00002456 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002457 ((input_type == DataType::Type::kFloat32 || input_type == DataType::Type::kFloat64)
2458 && result_type == DataType::Type::kInt64)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01002459 ? LocationSummary::kCallOnMainOnly
Roland Levillain624279f2014-12-04 11:54:28 +00002460 : LocationSummary::kNoCall;
2461 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002462 new (GetGraph()->GetAllocator()) LocationSummary(conversion, call_kind);
Roland Levillain624279f2014-12-04 11:54:28 +00002463
Roland Levillaindff1f282014-11-05 14:15:05 +00002464 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002465 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002466 case DataType::Type::kInt8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002467 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002468 case DataType::Type::kUint8:
2469 case DataType::Type::kInt8:
2470 case DataType::Type::kUint16:
2471 case DataType::Type::kInt16:
2472 case DataType::Type::kInt32:
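        // Only EAX, EBX, ECX and EDX have byte forms (AL/BL/CL/DL), hence the
        // byte-register constraint on the input below.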
2473 locations->SetInAt(0, Location::ByteRegisterOrConstant(ECX, conversion->InputAt(0)));
2474 // Make the output overlap to please the register allocator. This greatly simplifies
2475  // the validation of the linear scan implementation.
2476 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2477 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002478 case DataType::Type::kInt64: {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002479 HInstruction* input = conversion->InputAt(0);
2480 Location input_location = input->IsConstant()
2481 ? Location::ConstantLocation(input->AsConstant())
2482 : Location::RegisterPairLocation(EAX, EDX);
2483 locations->SetInAt(0, input_location);
2484 // Make the output overlap to please the register allocator. This greatly simplifies
2485  // the validation of the linear scan implementation.
2486 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2487 break;
2488 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00002489
2490 default:
2491 LOG(FATAL) << "Unexpected type conversion from " << input_type
2492 << " to " << result_type;
2493 }
2494 break;
2495
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002496 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002497 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002498 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
2499 locations->SetInAt(0, Location::Any());
2500 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Roland Levillain01a8d712014-11-14 16:27:39 +00002501 break;
2502
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002503 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002504 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002505 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002506 locations->SetInAt(0, Location::Any());
2507 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2508 break;
2509
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002510 case DataType::Type::kFloat32:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002511 locations->SetInAt(0, Location::RequiresFpuRegister());
2512 locations->SetOut(Location::RequiresRegister());
2513 locations->AddTemp(Location::RequiresFpuRegister());
2514 break;
2515
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002516 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002517 locations->SetInAt(0, Location::RequiresFpuRegister());
2518 locations->SetOut(Location::RequiresRegister());
2519 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002520 break;
2521
2522 default:
2523 LOG(FATAL) << "Unexpected type conversion from " << input_type
2524 << " to " << result_type;
2525 }
2526 break;
2527
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002528 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002529 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002530 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002531 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002532 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002533 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002534 case DataType::Type::kInt16:
2535 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002536 locations->SetInAt(0, Location::RegisterLocation(EAX));
2537 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
2538 break;
2539
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002540 case DataType::Type::kFloat32:
2541 case DataType::Type::kFloat64: {
Vladimir Marko949c91f2015-01-27 10:48:44 +00002542 InvokeRuntimeCallingConvention calling_convention;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002543 XmmRegister parameter = calling_convention.GetFpuRegisterAt(0);
2544 locations->SetInAt(0, Location::FpuRegisterLocation(parameter));
2545
Vladimir Marko949c91f2015-01-27 10:48:44 +00002546 // The runtime helper puts the result in EAX, EDX.
2547 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Vladimir Marko949c91f2015-01-27 10:48:44 +00002548 }
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002549 break;
Roland Levillaindff1f282014-11-05 14:15:05 +00002550
2551 default:
2552 LOG(FATAL) << "Unexpected type conversion from " << input_type
2553 << " to " << result_type;
2554 }
2555 break;
2556
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002557 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002558 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002559 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002560 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002561 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002562 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002563 case DataType::Type::kInt16:
2564 case DataType::Type::kInt32:
Roland Levillaincff13742014-11-17 14:32:17 +00002565 locations->SetInAt(0, Location::RequiresRegister());
2566 locations->SetOut(Location::RequiresFpuRegister());
2567 break;
2568
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002569 case DataType::Type::kInt64:
Roland Levillain232ade02015-04-20 15:14:36 +01002570 locations->SetInAt(0, Location::Any());
2571 locations->SetOut(Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002572 break;
2573
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002574 case DataType::Type::kFloat64:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002575 locations->SetInAt(0, Location::RequiresFpuRegister());
2576 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002577 break;
2578
2579 default:
2580 LOG(FATAL) << "Unexpected type conversion from " << input_type
2581 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002582 }
Roland Levillaincff13742014-11-17 14:32:17 +00002583 break;
2584
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002585 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002586 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002587 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002588 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002589 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002590 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002591 case DataType::Type::kInt16:
2592 case DataType::Type::kInt32:
Roland Levillaincff13742014-11-17 14:32:17 +00002593 locations->SetInAt(0, Location::RequiresRegister());
2594 locations->SetOut(Location::RequiresFpuRegister());
2595 break;
2596
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002597 case DataType::Type::kInt64:
Roland Levillain232ade02015-04-20 15:14:36 +01002598 locations->SetInAt(0, Location::Any());
2599 locations->SetOut(Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002600 break;
2601
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002602 case DataType::Type::kFloat32:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002603 locations->SetInAt(0, Location::RequiresFpuRegister());
2604 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002605 break;
2606
2607 default:
2608 LOG(FATAL) << "Unexpected type conversion from " << input_type
2609 << " to " << result_type;
2610 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002611 break;
2612
2613 default:
2614 LOG(FATAL) << "Unexpected type conversion from " << input_type
2615 << " to " << result_type;
2616 }
2617}
2618
2619void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
2620 LocationSummary* locations = conversion->GetLocations();
2621 Location out = locations->Out();
2622 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002623 DataType::Type result_type = conversion->GetResultType();
2624 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002625 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2626 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002627 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002628 case DataType::Type::kUint8:
2629 switch (input_type) {
2630 case DataType::Type::kInt8:
2631 case DataType::Type::kUint16:
2632 case DataType::Type::kInt16:
2633 case DataType::Type::kInt32:
2634 if (in.IsRegister()) {
2635 __ movzxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
2636 } else {
2637 DCHECK(in.GetConstant()->IsIntConstant());
2638 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
2639 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint8_t>(value)));
2640 }
2641 break;
2642 case DataType::Type::kInt64:
2643 if (in.IsRegisterPair()) {
2644 __ movzxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2645 } else {
2646 DCHECK(in.GetConstant()->IsLongConstant());
2647 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2648 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint8_t>(value)));
2649 }
2650 break;
2651
2652 default:
2653 LOG(FATAL) << "Unexpected type conversion from " << input_type
2654 << " to " << result_type;
2655 }
2656 break;
2657
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002658 case DataType::Type::kInt8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002659 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002660 case DataType::Type::kUint8:
2661 case DataType::Type::kUint16:
2662 case DataType::Type::kInt16:
2663 case DataType::Type::kInt32:
2664 if (in.IsRegister()) {
2665 __ movsxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
2666 } else {
2667 DCHECK(in.GetConstant()->IsIntConstant());
2668 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
2669 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2670 }
2671 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002672 case DataType::Type::kInt64:
Vladimir Markob52bbde2016-02-12 12:06:05 +00002673 if (in.IsRegisterPair()) {
2674 __ movsxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2675 } else {
2676 DCHECK(in.GetConstant()->IsLongConstant());
2677 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2678 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2679 }
2680 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002681
2682 default:
2683 LOG(FATAL) << "Unexpected type conversion from " << input_type
2684 << " to " << result_type;
2685 }
2686 break;
2687
2688 case DataType::Type::kUint16:
2689 switch (input_type) {
2690 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002691 case DataType::Type::kInt16:
2692 case DataType::Type::kInt32:
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002693 if (in.IsRegister()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002694 __ movzxw(out.AsRegister<Register>(), in.AsRegister<Register>());
2695 } else if (in.IsStackSlot()) {
2696 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002697 } else {
2698 DCHECK(in.GetConstant()->IsIntConstant());
2699 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002700 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
2701 }
2702 break;
2703 case DataType::Type::kInt64:
2704 if (in.IsRegisterPair()) {
2705 __ movzxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2706 } else if (in.IsDoubleStackSlot()) {
2707 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2708 } else {
2709 DCHECK(in.GetConstant()->IsLongConstant());
2710 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2711 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002712 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00002713 break;
2714
2715 default:
2716 LOG(FATAL) << "Unexpected type conversion from " << input_type
2717 << " to " << result_type;
2718 }
2719 break;
2720
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002721 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002722 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002723 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002724 case DataType::Type::kInt32:
Roland Levillain01a8d712014-11-14 16:27:39 +00002725 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002726 __ movsxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain01a8d712014-11-14 16:27:39 +00002727 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002728 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain01a8d712014-11-14 16:27:39 +00002729 } else {
2730 DCHECK(in.GetConstant()->IsIntConstant());
2731 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002732 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
Roland Levillain01a8d712014-11-14 16:27:39 +00002733 }
2734 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002735 case DataType::Type::kInt64:
2736 if (in.IsRegisterPair()) {
2737 __ movsxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2738 } else if (in.IsDoubleStackSlot()) {
2739 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2740 } else {
2741 DCHECK(in.GetConstant()->IsLongConstant());
2742 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2743 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
2744 }
2745 break;
Roland Levillain01a8d712014-11-14 16:27:39 +00002746
2747 default:
2748 LOG(FATAL) << "Unexpected type conversion from " << input_type
2749 << " to " << result_type;
2750 }
2751 break;
2752
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002753 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002754 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002755 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002756 if (in.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002757 __ movl(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
Roland Levillain946e1432014-11-11 17:35:19 +00002758 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002759 __ movl(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain946e1432014-11-11 17:35:19 +00002760 } else {
2761 DCHECK(in.IsConstant());
2762 DCHECK(in.GetConstant()->IsLongConstant());
2763 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002764 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002765 }
2766 break;
2767
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002768 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002769 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2770 Register output = out.AsRegister<Register>();
2771 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002772 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002773
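          // This block implements the Java float-to-int semantics: inputs that compare
          // above or equal to (float)kPrimIntMax keep kPrimIntMax, NaN (the unordered
          // case) yields 0, and the rest are truncated with cvttss2si, whose overflow
          // result 0x80000000 is already Integer.MIN_VALUE for too-small inputs.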
2774 __ movl(output, Immediate(kPrimIntMax));
2775 // temp = int-to-float(output)
2776 __ cvtsi2ss(temp, output);
2777 // if input >= temp goto done
2778 __ comiss(input, temp);
2779 __ j(kAboveEqual, &done);
2780 // if input == NaN goto nan
2781 __ j(kUnordered, &nan);
2782 // output = float-to-int-truncate(input)
2783 __ cvttss2si(output, input);
2784 __ jmp(&done);
2785 __ Bind(&nan);
2786 // output = 0
2787 __ xorl(output, output);
2788 __ Bind(&done);
2789 break;
2790 }
2791
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002792 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002793 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2794 Register output = out.AsRegister<Register>();
2795 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002796 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002797
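          // Same approach as the float-to-int conversion above, using the
          // double-precision instructions (comisd / cvttsd2si).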
2798 __ movl(output, Immediate(kPrimIntMax));
2799 // temp = int-to-double(output)
2800 __ cvtsi2sd(temp, output);
2801 // if input >= temp goto done
2802 __ comisd(input, temp);
2803 __ j(kAboveEqual, &done);
2804 // if input == NaN goto nan
2805 __ j(kUnordered, &nan);
2806 // output = double-to-int-truncate(input)
2807 __ cvttsd2si(output, input);
2808 __ jmp(&done);
2809 __ Bind(&nan);
2810 // output = 0
2811 __ xorl(output, output);
2812 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002813 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002814 }
Roland Levillain946e1432014-11-11 17:35:19 +00002815
2816 default:
2817 LOG(FATAL) << "Unexpected type conversion from " << input_type
2818 << " to " << result_type;
2819 }
2820 break;
2821
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002822 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002823 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002824 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002825 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002826 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002827 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002828 case DataType::Type::kInt16:
2829 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002830 DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
2831 DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002832 DCHECK_EQ(in.AsRegister<Register>(), EAX);
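          // cdq sign-extends EAX into EDX:EAX, which is why the locations fix the
          // input in EAX and the output in the EAX/EDX register pair.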
Roland Levillaindff1f282014-11-05 14:15:05 +00002833 __ cdq();
2834 break;
2835
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002836 case DataType::Type::kFloat32:
Serban Constantinescuba45db02016-07-12 22:53:02 +01002837 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002838 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
Roland Levillain624279f2014-12-04 11:54:28 +00002839 break;
2840
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002841 case DataType::Type::kFloat64:
Serban Constantinescuba45db02016-07-12 22:53:02 +01002842 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002843 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
Roland Levillaindff1f282014-11-05 14:15:05 +00002844 break;
2845
2846 default:
2847 LOG(FATAL) << "Unexpected type conversion from " << input_type
2848 << " to " << result_type;
2849 }
2850 break;
2851
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002852 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002853 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002854 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002855 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002856 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002857 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002858 case DataType::Type::kInt16:
2859 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002860 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002861 break;
2862
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002863 case DataType::Type::kInt64: {
Roland Levillain232ade02015-04-20 15:14:36 +01002864 size_t adjustment = 0;
Roland Levillain6d0e4832014-11-27 18:31:21 +00002865
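          // Sketch of the approach (PushOntoFPStack is defined elsewhere): 32-bit SSE
          // cannot convert a 64-bit integer to float directly, so the value is presumably
          // loaded onto the x87 FP stack and written back with fstps, which performs the
          // int64 -> float rounding as part of the store.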
Roland Levillain232ade02015-04-20 15:14:36 +01002866 // Create stack space for the call to
2867 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstps below.
2868 // TODO: enhance register allocator to ask for stack temporaries.
2869 if (!in.IsDoubleStackSlot() || !out.IsStackSlot()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002870 adjustment = DataType::Size(DataType::Type::kInt64);
Roland Levillain232ade02015-04-20 15:14:36 +01002871 __ subl(ESP, Immediate(adjustment));
2872 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002873
Roland Levillain232ade02015-04-20 15:14:36 +01002874 // Load the value to the FP stack, using temporaries if needed.
2875 PushOntoFPStack(in, 0, adjustment, false, true);
2876
2877 if (out.IsStackSlot()) {
2878 __ fstps(Address(ESP, out.GetStackIndex() + adjustment));
2879 } else {
2880 __ fstps(Address(ESP, 0));
2881 Location stack_temp = Location::StackSlot(0);
2882 codegen_->Move32(out, stack_temp);
2883 }
2884
2885 // Remove the temporary stack space we allocated.
2886 if (adjustment != 0) {
2887 __ addl(ESP, Immediate(adjustment));
2888 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002889 break;
2890 }
2891
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002892 case DataType::Type::kFloat64:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002893 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002894 break;
2895
2896 default:
2897 LOG(FATAL) << "Unexpected type conversion from " << input_type
2898 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002899 }
Roland Levillaincff13742014-11-17 14:32:17 +00002900 break;
2901
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002902 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002903 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002904 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002905 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002906 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002907 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002908 case DataType::Type::kInt16:
2909 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002910 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002911 break;
2912
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002913 case DataType::Type::kInt64: {
Roland Levillain232ade02015-04-20 15:14:36 +01002914 size_t adjustment = 0;
Roland Levillain647b9ed2014-11-27 12:06:00 +00002915
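          // Same x87-based approach as the long-to-float conversion above, storing with
          // fstpl so the result keeps double precision.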
Roland Levillain232ade02015-04-20 15:14:36 +01002916 // Create stack space for the call to
2917 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstpl below.
2918 // TODO: enhance register allocator to ask for stack temporaries.
2919 if (!in.IsDoubleStackSlot() || !out.IsDoubleStackSlot()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002920 adjustment = DataType::Size(DataType::Type::kInt64);
Roland Levillain232ade02015-04-20 15:14:36 +01002921 __ subl(ESP, Immediate(adjustment));
2922 }
2923
2924 // Load the value to the FP stack, using temporaries if needed.
2925 PushOntoFPStack(in, 0, adjustment, false, true);
2926
2927 if (out.IsDoubleStackSlot()) {
2928 __ fstpl(Address(ESP, out.GetStackIndex() + adjustment));
2929 } else {
2930 __ fstpl(Address(ESP, 0));
2931 Location stack_temp = Location::DoubleStackSlot(0);
2932 codegen_->Move64(out, stack_temp);
2933 }
2934
2935 // Remove the temporary stack space we allocated.
2936 if (adjustment != 0) {
2937 __ addl(ESP, Immediate(adjustment));
2938 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002939 break;
2940 }
2941
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002942 case DataType::Type::kFloat32:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002943 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002944 break;
2945
2946 default:
2947 LOG(FATAL) << "Unexpected type conversion from " << input_type
2948 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002949 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002950 break;
2951
2952 default:
2953 LOG(FATAL) << "Unexpected type conversion from " << input_type
2954 << " to " << result_type;
2955 }
2956}
2957
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002958void LocationsBuilderX86::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002959 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002960 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002961 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002962 case DataType::Type::kInt32: {
Mark Mendell09b84632015-02-13 17:48:38 -05002963 locations->SetInAt(0, Location::RequiresRegister());
2964 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2965 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2966 break;
2967 }
2968
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002969 case DataType::Type::kInt64: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002970 locations->SetInAt(0, Location::RequiresRegister());
2971 locations->SetInAt(1, Location::Any());
2972 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002973 break;
2974 }
2975
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002976 case DataType::Type::kFloat32:
2977 case DataType::Type::kFloat64: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002978 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002979 if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
2980 DCHECK(add->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00002981 } else if (add->InputAt(1)->IsConstant()) {
2982 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002983 } else {
2984 locations->SetInAt(1, Location::Any());
2985 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002986 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002987 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002988 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002989
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002990 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002991 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Elliott Hughesc1896c92018-11-29 11:33:18 -08002992 UNREACHABLE();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002993 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002994}
2995
2996void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
2997 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002998 Location first = locations->InAt(0);
2999 Location second = locations->InAt(1);
Mark Mendell09b84632015-02-13 17:48:38 -05003000 Location out = locations->Out();
3001
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003002 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003003 case DataType::Type::kInt32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003004 if (second.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05003005 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3006 __ addl(out.AsRegister<Register>(), second.AsRegister<Register>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003007 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3008 __ addl(out.AsRegister<Register>(), first.AsRegister<Register>());
Mark Mendell09b84632015-02-13 17:48:38 -05003009 } else {
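        // Illustration: leal out, [first + second*1 + 0] acts as a
        // non-destructive three-operand add. It leaves both inputs and EFLAGS
        // untouched, which is why it is used when the output register differs
        // from both inputs.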
3010 __ leal(out.AsRegister<Register>(), Address(
3011 first.AsRegister<Register>(), second.AsRegister<Register>(), TIMES_1, 0));
3012 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003013 } else if (second.IsConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05003014 int32_t value = second.GetConstant()->AsIntConstant()->GetValue();
3015 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3016 __ addl(out.AsRegister<Register>(), Immediate(value));
3017 } else {
3018 __ leal(out.AsRegister<Register>(), Address(first.AsRegister<Register>(), value));
3019 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003020 } else {
Mark Mendell09b84632015-02-13 17:48:38 -05003021 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003022 __ addl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003023 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003024 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003025 }
3026
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003027 case DataType::Type::kInt64: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003028 if (second.IsRegisterPair()) {
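        // The addl/adcl pair forms one 64-bit addition: addl sets the carry
        // flag from the low words and adcl folds that carry into the high
        // words. For example, 0x00000001_FFFFFFFF + 1: the low add wraps to 0
        // with CF=1, and adcl then yields a high word of 2, i.e.
        // 0x00000002_00000000.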
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003029 __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3030 __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003031 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003032 __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3033 __ adcl(first.AsRegisterPairHigh<Register>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003034 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003035 } else {
3036 DCHECK(second.IsConstant()) << second;
3037 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3038 __ addl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3039 __ adcl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003040 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003041 break;
3042 }
3043
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003044 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003045 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003046 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003047 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3048 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003049 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003050 __ addss(first.AsFpuRegister<XmmRegister>(),
3051 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003052 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3053 const_area->GetBaseMethodAddress(),
3054 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003055 } else {
3056 DCHECK(second.IsStackSlot());
3057 __ addss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003058 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003059 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003060 }
3061
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003062 case DataType::Type::kFloat64: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003063 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003064 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003065 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3066 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003067 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003068 __ addsd(first.AsFpuRegister<XmmRegister>(),
3069 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003070 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3071 const_area->GetBaseMethodAddress(),
3072 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003073 } else {
3074 DCHECK(second.IsDoubleStackSlot());
3075 __ addsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003076 }
3077 break;
3078 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003079
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003080 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003081 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003082 }
3083}
3084
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003085void LocationsBuilderX86::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003086 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003087 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003088 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003089 case DataType::Type::kInt32:
3090 case DataType::Type::kInt64: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003091 locations->SetInAt(0, Location::RequiresRegister());
3092 locations->SetInAt(1, Location::Any());
3093 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003094 break;
3095 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003096 case DataType::Type::kFloat32:
3097 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003098 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003099 if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3100 DCHECK(sub->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003101 } else if (sub->InputAt(1)->IsConstant()) {
3102 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003103 } else {
3104 locations->SetInAt(1, Location::Any());
3105 }
Calin Juravle11351682014-10-23 15:38:15 +01003106 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003107 break;
Calin Juravle11351682014-10-23 15:38:15 +01003108 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003109
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003110 default:
Calin Juravle11351682014-10-23 15:38:15 +01003111 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003112 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003113}
3114
3115void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
3116 LocationSummary* locations = sub->GetLocations();
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003117 Location first = locations->InAt(0);
3118 Location second = locations->InAt(1);
Calin Juravle11351682014-10-23 15:38:15 +01003119 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003120 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003121 case DataType::Type::kInt32: {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003122 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003123 __ subl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003124 } else if (second.IsConstant()) {
Roland Levillain199f3362014-11-27 17:15:16 +00003125 __ subl(first.AsRegister<Register>(),
3126 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003127 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003128 __ subl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003129 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003130 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003131 }
3132
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003133 case DataType::Type::kInt64: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003134 if (second.IsRegisterPair()) {
Calin Juravle11351682014-10-23 15:38:15 +01003135 __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3136 __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003137 } else if (second.IsDoubleStackSlot()) {
Calin Juravle11351682014-10-23 15:38:15 +01003138 __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003139 __ sbbl(first.AsRegisterPairHigh<Register>(),
3140 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003141 } else {
3142 DCHECK(second.IsConstant()) << second;
3143 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3144 __ subl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3145 __ sbbl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003146 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003147 break;
3148 }
3149
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003150 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003151 if (second.IsFpuRegister()) {
3152 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3153 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3154 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003155 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003156 __ subss(first.AsFpuRegister<XmmRegister>(),
3157 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003158 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3159 const_area->GetBaseMethodAddress(),
3160 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003161 } else {
3162 DCHECK(second.IsStackSlot());
3163 __ subss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3164 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003165 break;
Calin Juravle11351682014-10-23 15:38:15 +01003166 }
3167
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003168 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003169 if (second.IsFpuRegister()) {
3170 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3171 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3172 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003173 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003174 __ subsd(first.AsFpuRegister<XmmRegister>(),
3175 codegen_->LiteralDoubleAddress(
3176 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003177 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003178 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3179 } else {
3180 DCHECK(second.IsDoubleStackSlot());
3181 __ subsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3182 }
Calin Juravle11351682014-10-23 15:38:15 +01003183 break;
3184 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003185
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003186 default:
Calin Juravle11351682014-10-23 15:38:15 +01003187 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003188 }
3189}
3190
Calin Juravle34bacdf2014-10-07 20:23:36 +01003191void LocationsBuilderX86::VisitMul(HMul* mul) {
3192 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003193 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003194 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003195 case DataType::Type::kInt32:
Calin Juravle34bacdf2014-10-07 20:23:36 +01003196 locations->SetInAt(0, Location::RequiresRegister());
3197 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003198 if (mul->InputAt(1)->IsIntConstant()) {
3199 // Can use 3 operand multiply.
3200 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3201 } else {
3202 locations->SetOut(Location::SameAsFirstInput());
3203 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003204 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003205 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003206 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003207 locations->SetInAt(1, Location::Any());
3208 locations->SetOut(Location::SameAsFirstInput());
3209      // Needed because the 32x32 multiply produces its 64-bit result in EDX:EAX.
3210 locations->AddTemp(Location::RegisterLocation(EAX));
3211 locations->AddTemp(Location::RegisterLocation(EDX));
3212 break;
3213 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003214 case DataType::Type::kFloat32:
3215 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003216 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003217 if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3218 DCHECK(mul->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003219 } else if (mul->InputAt(1)->IsConstant()) {
3220 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003221 } else {
3222 locations->SetInAt(1, Location::Any());
3223 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003224 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003225 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003226 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003227
3228 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003229 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003230 }
3231}
3232
3233void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
3234 LocationSummary* locations = mul->GetLocations();
3235 Location first = locations->InAt(0);
3236 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003237 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003238
3239 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003240 case DataType::Type::kInt32:
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003241 // The constant may have ended up in a register, so test explicitly to avoid
3242 // problems where the output may not be the same as the first operand.
3243 if (mul->InputAt(1)->IsIntConstant()) {
3244 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3245 __ imull(out.AsRegister<Register>(), first.AsRegister<Register>(), imm);
3246 } else if (second.IsRegister()) {
3247 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003248 __ imull(first.AsRegister<Register>(), second.AsRegister<Register>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003249 } else {
3250 DCHECK(second.IsStackSlot());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003251 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003252 __ imull(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003253 }
3254 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003255
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003256 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003257 Register in1_hi = first.AsRegisterPairHigh<Register>();
3258 Register in1_lo = first.AsRegisterPairLow<Register>();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003259 Register eax = locations->GetTemp(0).AsRegister<Register>();
3260 Register edx = locations->GetTemp(1).AsRegister<Register>();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003261
3262 DCHECK_EQ(EAX, eax);
3263 DCHECK_EQ(EDX, edx);
3264
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003265 // input: in1 - 64 bits, in2 - 64 bits.
Calin Juravle34bacdf2014-10-07 20:23:36 +01003266 // output: in1
3267      // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
3268 // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
3269 // parts: in1.lo = (in1.lo * in2.lo)[31:0]
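      // Worked example of the decomposition above (result truncated to 64 bits):
      // in1 = 0x00000001_00000003, in2 = 0x00000001_00000005.
      // in1.lo * in2.hi + in1.hi * in2.lo = 3 + 5 = 8, and in1.lo * in2.lo = 15
      // contributes nothing above bit 31, so the result is 0x00000008_0000000F,
      // which equals (in1 * in2) mod 2^64; the 2^64 term of the full product is
      // simply dropped.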
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003270 if (second.IsConstant()) {
3271 DCHECK(second.GetConstant()->IsLongConstant());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003272
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003273 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3274 int32_t low_value = Low32Bits(value);
3275 int32_t high_value = High32Bits(value);
3276 Immediate low(low_value);
3277 Immediate high(high_value);
3278
3279 __ movl(eax, high);
3280 // eax <- in1.lo * in2.hi
3281 __ imull(eax, in1_lo);
3282 // in1.hi <- in1.hi * in2.lo
3283 __ imull(in1_hi, low);
3284 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3285 __ addl(in1_hi, eax);
3286        // Move in2.lo to eax to prepare for the widening 32x32->64 multiply (mull).
3287 __ movl(eax, low);
3288 // edx:eax <- in1.lo * in2.lo
3289 __ mull(in1_lo);
3290 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3291 __ addl(in1_hi, edx);
3292 // in1.lo <- (in1.lo * in2.lo)[31:0];
3293 __ movl(in1_lo, eax);
3294 } else if (second.IsRegisterPair()) {
3295 Register in2_hi = second.AsRegisterPairHigh<Register>();
3296 Register in2_lo = second.AsRegisterPairLow<Register>();
3297
3298 __ movl(eax, in2_hi);
3299 // eax <- in1.lo * in2.hi
3300 __ imull(eax, in1_lo);
3301 // in1.hi <- in1.hi * in2.lo
3302 __ imull(in1_hi, in2_lo);
3303 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3304 __ addl(in1_hi, eax);
3305 // move in1_lo to eax to prepare for double precision
3306 __ movl(eax, in1_lo);
3307 // edx:eax <- in1.lo * in2.lo
3308 __ mull(in2_lo);
3309 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3310 __ addl(in1_hi, edx);
3311 // in1.lo <- (in1.lo * in2.lo)[31:0];
3312 __ movl(in1_lo, eax);
3313 } else {
3314 DCHECK(second.IsDoubleStackSlot()) << second;
3315 Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
3316 Address in2_lo(ESP, second.GetStackIndex());
3317
3318 __ movl(eax, in2_hi);
3319 // eax <- in1.lo * in2.hi
3320 __ imull(eax, in1_lo);
3321 // in1.hi <- in1.hi * in2.lo
3322 __ imull(in1_hi, in2_lo);
3323 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3324 __ addl(in1_hi, eax);
3325        // Move in1_lo to eax to prepare for the widening 32x32->64 multiply (mull).
3326 __ movl(eax, in1_lo);
3327 // edx:eax <- in1.lo * in2.lo
3328 __ mull(in2_lo);
3329 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3330 __ addl(in1_hi, edx);
3331 // in1.lo <- (in1.lo * in2.lo)[31:0];
3332 __ movl(in1_lo, eax);
3333 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003334
3335 break;
3336 }
3337
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003338 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003339 DCHECK(first.Equals(locations->Out()));
3340 if (second.IsFpuRegister()) {
3341 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3342 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3343 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003344 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003345 __ mulss(first.AsFpuRegister<XmmRegister>(),
3346 codegen_->LiteralFloatAddress(
3347 const_area->GetConstant()->AsFloatConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003348 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003349 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3350 } else {
3351 DCHECK(second.IsStackSlot());
3352 __ mulss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3353 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003354 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003355 }
3356
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003357 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003358 DCHECK(first.Equals(locations->Out()));
3359 if (second.IsFpuRegister()) {
3360 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3361 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3362 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003363 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003364 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3365 codegen_->LiteralDoubleAddress(
3366 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003367 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003368 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3369 } else {
3370 DCHECK(second.IsDoubleStackSlot());
3371 __ mulsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3372 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003373 break;
3374 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003375
3376 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003377 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003378 }
3379}
3380
Roland Levillain232ade02015-04-20 15:14:36 +01003381void InstructionCodeGeneratorX86::PushOntoFPStack(Location source,
3382 uint32_t temp_offset,
3383 uint32_t stack_adjustment,
3384 bool is_fp,
3385 bool is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003386 if (source.IsStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003387 DCHECK(!is_wide);
3388 if (is_fp) {
3389 __ flds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3390 } else {
3391 __ filds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3392 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003393 } else if (source.IsDoubleStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003394 DCHECK(is_wide);
3395 if (is_fp) {
3396 __ fldl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3397 } else {
3398 __ fildl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3399 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003400 } else {
3401 // Write the value to the temporary location on the stack and load to FP stack.
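    // x87 loads (fld/fild) cannot read general-purpose or XMM registers, only
    // memory (or the FP stack itself), so such sources are spilled to the
    // reserved stack slot first.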
Roland Levillain232ade02015-04-20 15:14:36 +01003402 if (!is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003403 Location stack_temp = Location::StackSlot(temp_offset);
3404 codegen_->Move32(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003405 if (is_fp) {
3406 __ flds(Address(ESP, temp_offset));
3407 } else {
3408 __ filds(Address(ESP, temp_offset));
3409 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003410 } else {
3411 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3412 codegen_->Move64(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003413 if (is_fp) {
3414 __ fldl(Address(ESP, temp_offset));
3415 } else {
3416 __ fildl(Address(ESP, temp_offset));
3417 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003418 }
3419 }
3420}
3421
3422void InstructionCodeGeneratorX86::GenerateRemFP(HRem *rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003423 DataType::Type type = rem->GetResultType();
3424 bool is_float = type == DataType::Type::kFloat32;
3425 size_t elem_size = DataType::Size(type);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003426 LocationSummary* locations = rem->GetLocations();
3427 Location first = locations->InAt(0);
3428 Location second = locations->InAt(1);
3429 Location out = locations->Out();
3430
3431 // Create stack space for 2 elements.
3432 // TODO: enhance register allocator to ask for stack temporaries.
3433 __ subl(ESP, Immediate(2 * elem_size));
3434
3435 // Load the values to the FP stack in reverse order, using temporaries if needed.
Roland Levillain232ade02015-04-20 15:14:36 +01003436 const bool is_wide = !is_float;
Andreas Gampe3db70682018-12-26 15:12:03 -08003437 PushOntoFPStack(second, elem_size, 2 * elem_size, /* is_fp= */ true, is_wide);
3438 PushOntoFPStack(first, 0, 2 * elem_size, /* is_fp= */ true, is_wide);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003439
3440 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003441 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003442 __ Bind(&retry);
3443 __ fprem();
3444
3445 // Move FP status to AX.
3446 __ fstsw();
3447
3448 // And see if the argument reduction is complete. This is signaled by the
3449 // C2 FPU flag bit set to 0.
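  // Background: fstsw copies the FPU status word into AX; bit 10 (C2, selected
  // by the mask below) stays set while fprem has only done a partial
  // reduction. Each fprem pass reduces the exponent difference by at most 63,
  // so operands far apart in magnitude need several iterations.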
3450 __ andl(EAX, Immediate(kC2ConditionMask));
3451 __ j(kNotEqual, &retry);
3452
3453 // We have settled on the final value. Retrieve it into an XMM register.
3454 // Store FP top of stack to real stack.
3455 if (is_float) {
3456 __ fsts(Address(ESP, 0));
3457 } else {
3458 __ fstl(Address(ESP, 0));
3459 }
3460
3461 // Pop the 2 items from the FP stack.
3462 __ fucompp();
3463
3464 // Load the value from the stack into an XMM register.
3465 DCHECK(out.IsFpuRegister()) << out;
3466 if (is_float) {
3467 __ movss(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3468 } else {
3469 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3470 }
3471
3472 // And remove the temporary stack space we allocated.
3473 __ addl(ESP, Immediate(2 * elem_size));
3474}
3475
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003476
3477void InstructionCodeGeneratorX86::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3478 DCHECK(instruction->IsDiv() || instruction->IsRem());
3479
3480 LocationSummary* locations = instruction->GetLocations();
3481 DCHECK(locations->InAt(1).IsConstant());
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003482 DCHECK(locations->InAt(1).GetConstant()->IsIntConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003483
3484 Register out_register = locations->Out().AsRegister<Register>();
3485 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003486 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003487
3488 DCHECK(imm == 1 || imm == -1);
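  // With a divisor of +/-1 no idivl is needed: the remainder is always 0 and
  // the quotient is the (possibly negated) numerator. Note that negl wraps
  // INT32_MIN back to INT32_MIN, which matches Java's MIN_VALUE / -1.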
3489
3490 if (instruction->IsRem()) {
3491 __ xorl(out_register, out_register);
3492 } else {
3493 __ movl(out_register, input_register);
3494 if (imm == -1) {
3495 __ negl(out_register);
3496 }
3497 }
3498}
3499
Shalini Salomi Bodapatia66784b2018-11-06 13:05:44 +05303500void InstructionCodeGeneratorX86::RemByPowerOfTwo(HRem* instruction) {
3501 LocationSummary* locations = instruction->GetLocations();
3502 Location second = locations->InAt(1);
3503
3504 Register out = locations->Out().AsRegister<Register>();
3505 Register numerator = locations->InAt(0).AsRegister<Register>();
3506
3507 int32_t imm = Int64FromConstant(second.GetConstant());
3508 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3509 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
3510
3511 Register tmp = locations->GetTemp(0).AsRegister<Register>();
3512 NearLabel done;
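  // Sketch of the sequence below: out = numerator & (abs_imm - 1) is the
  // non-negative residue; if it is non-zero and the numerator is negative, the
  // leal/cmovl pair replaces it with out - abs_imm (note ~(abs_imm - 1) ==
  // -abs_imm in two's complement) so the remainder carries the numerator's
  // sign, as Java's % requires. Example with abs_imm == 4:
  // -7 & 3 == 1, then 1 - 4 == -3 == -7 % 4.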
3513 __ movl(out, numerator);
3514  __ andl(out, Immediate(abs_imm - 1));
3515  __ j(Condition::kZero, &done);
3516  __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm - 1))));
3517 __ testl(numerator, numerator);
3518 __ cmovl(Condition::kLess, out, tmp);
3519 __ Bind(&done);
3520}
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003521
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003522void InstructionCodeGeneratorX86::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003523 LocationSummary* locations = instruction->GetLocations();
3524
3525 Register out_register = locations->Out().AsRegister<Register>();
3526 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003527 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003528 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3529 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003530
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003531 Register num = locations->GetTemp(0).AsRegister<Register>();
3532
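  // Division by 2^k with an arithmetic shift rounds toward negative infinity,
  // but Java division truncates toward zero. Adding (abs_imm - 1) to negative
  // inputs first (the leal + cmovl below) fixes the rounding: e.g. -7 / 4
  // becomes (-7 + 3) >> 2 == -1 instead of -7 >> 2 == -2.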
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003533 __ leal(num, Address(input_register, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003534 __ testl(input_register, input_register);
3535 __ cmovl(kGreaterEqual, num, input_register);
3536 int shift = CTZ(imm);
3537 __ sarl(num, Immediate(shift));
3538
3539 if (imm < 0) {
3540 __ negl(num);
3541 }
3542
3543 __ movl(out_register, num);
3544}
3545
3546void InstructionCodeGeneratorX86::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3547 DCHECK(instruction->IsDiv() || instruction->IsRem());
3548
3549 LocationSummary* locations = instruction->GetLocations();
3550 int imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
3551
3552 Register eax = locations->InAt(0).AsRegister<Register>();
3553 Register out = locations->Out().AsRegister<Register>();
3554 Register num;
3555 Register edx;
3556
3557 if (instruction->IsDiv()) {
3558 edx = locations->GetTemp(0).AsRegister<Register>();
3559 num = locations->GetTemp(1).AsRegister<Register>();
3560 } else {
3561 edx = locations->Out().AsRegister<Register>();
3562 num = locations->GetTemp(0).AsRegister<Register>();
3563 }
3564
3565 DCHECK_EQ(EAX, eax);
3566 DCHECK_EQ(EDX, edx);
3567 if (instruction->IsDiv()) {
3568 DCHECK_EQ(EAX, out);
3569 } else {
3570 DCHECK_EQ(EDX, out);
3571 }
3572
3573 int64_t magic;
3574 int shift;
Andreas Gampe3db70682018-12-26 15:12:03 -08003575 CalculateMagicAndShiftForDivRem(imm, /* is_long= */ false, &magic, &shift);
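  // This is the usual "magic number" signed division (Granlund-Montgomery,
  // cf. Hacker's Delight): the quotient is recovered from the high half of a
  // 32x32->64 multiply, a correction for the sign of the magic constant, an
  // arithmetic shift, and a +1 adjustment for negative results. Illustrative
  // values: for imm == 7 the pair is typically magic == 0x92492493, shift == 2,
  // and 100 / 7 evaluates as high32(100 * magic) == -43, -43 + 100 == 57,
  // 57 >> 2 == 14.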
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003576
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003577 // Save the numerator.
3578 __ movl(num, eax);
3579
3580 // EAX = magic
3581 __ movl(eax, Immediate(magic));
3582
3583 // EDX:EAX = magic * numerator
3584 __ imull(num);
3585
3586 if (imm > 0 && magic < 0) {
3587 // EDX += num
3588 __ addl(edx, num);
3589 } else if (imm < 0 && magic > 0) {
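    // EDX -= num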
3590 __ subl(edx, num);
3591 }
3592
3593 // Shift if needed.
3594 if (shift != 0) {
3595 __ sarl(edx, Immediate(shift));
3596 }
3597
3598 // EDX += 1 if EDX < 0
3599 __ movl(eax, edx);
3600 __ shrl(edx, Immediate(31));
3601 __ addl(edx, eax);
3602
3603 if (instruction->IsRem()) {
3604 __ movl(eax, num);
3605 __ imull(edx, Immediate(imm));
3606 __ subl(eax, edx);
3607 __ movl(edx, eax);
3608 } else {
3609 __ movl(eax, edx);
3610 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003611}
3612
Calin Juravlebacfec32014-11-14 15:54:36 +00003613void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3614 DCHECK(instruction->IsDiv() || instruction->IsRem());
3615
3616 LocationSummary* locations = instruction->GetLocations();
3617 Location out = locations->Out();
3618 Location first = locations->InAt(0);
3619 Location second = locations->InAt(1);
3620 bool is_div = instruction->IsDiv();
3621
3622 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003623 case DataType::Type::kInt32: {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003624 DCHECK_EQ(EAX, first.AsRegister<Register>());
3625 DCHECK_EQ(is_div ? EAX : EDX, out.AsRegister<Register>());
Calin Juravlebacfec32014-11-14 15:54:36 +00003626
Vladimir Marko13c86fd2015-11-11 12:37:46 +00003627 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003628 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003629
3630 if (imm == 0) {
3631 // Do not generate anything for 0. DivZeroCheck would forbid any generated code.
3632 } else if (imm == 1 || imm == -1) {
3633 DivRemOneOrMinusOne(instruction);
Shalini Salomi Bodapatia66784b2018-11-06 13:05:44 +05303634 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
3635 if (is_div) {
3636 DivByPowerOfTwo(instruction->AsDiv());
3637 } else {
3638 RemByPowerOfTwo(instruction->AsRem());
3639 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003640 } else {
3641 DCHECK(imm <= -2 || imm >= 2);
3642 GenerateDivRemWithAnyConstant(instruction);
3643 }
3644 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01003645 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86(
David Srbecky9cd6d372016-02-09 15:24:47 +00003646 instruction, out.AsRegister<Register>(), is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003647 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003648
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003649 Register second_reg = second.AsRegister<Register>();
3650 // 0x80000000/-1 triggers an arithmetic exception!
3651        // Dividing by -1 is actually negation and -0x80000000 == 0x80000000, so
3652 // it's safe to just use negl instead of more complex comparisons.
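        // Java requires Integer.MIN_VALUE / -1 == Integer.MIN_VALUE (with a
        // remainder of 0), which is what the negation-based slow path
        // produces, while idivl would raise #DE for that one input pair.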
Calin Juravlebacfec32014-11-14 15:54:36 +00003653
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003654 __ cmpl(second_reg, Immediate(-1));
3655 __ j(kEqual, slow_path->GetEntryLabel());
Calin Juravlebacfec32014-11-14 15:54:36 +00003656
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003657 // edx:eax <- sign-extended of eax
3658 __ cdq();
3659 // eax = quotient, edx = remainder
3660 __ idivl(second_reg);
3661 __ Bind(slow_path->GetExitLabel());
3662 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003663 break;
3664 }
3665
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003666 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003667 InvokeRuntimeCallingConvention calling_convention;
3668 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
3669 DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
3670 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
3671 DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
3672 DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
3673 DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());
3674
3675 if (is_div) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003676 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003677 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003678 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003679 codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003680 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003681 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003682 break;
3683 }
3684
3685 default:
3686 LOG(FATAL) << "Unexpected type for GenerateDivRemIntegral " << instruction->GetResultType();
3687 }
3688}
3689
Calin Juravle7c4954d2014-10-28 16:57:40 +00003690void LocationsBuilderX86::VisitDiv(HDiv* div) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003691 LocationSummary::CallKind call_kind = (div->GetResultType() == DataType::Type::kInt64)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003692 ? LocationSummary::kCallOnMainOnly
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003693 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01003694 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(div, call_kind);
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003695
Calin Juravle7c4954d2014-10-28 16:57:40 +00003696 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003697 case DataType::Type::kInt32: {
Calin Juravled0d48522014-11-04 16:40:20 +00003698 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003699 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003700 locations->SetOut(Location::SameAsFirstInput());
3701 // Intel uses edx:eax as the dividend.
3702 locations->AddTemp(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003703 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3704 // which enforces results to be in EAX and EDX, things are simpler if we use EAX also as
3705 // output and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003706 if (div->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003707 locations->AddTemp(Location::RequiresRegister());
3708 }
Calin Juravled0d48522014-11-04 16:40:20 +00003709 break;
3710 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003711 case DataType::Type::kInt64: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003712 InvokeRuntimeCallingConvention calling_convention;
3713 locations->SetInAt(0, Location::RegisterPairLocation(
3714 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3715 locations->SetInAt(1, Location::RegisterPairLocation(
3716 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3717 // Runtime helper puts the result in EAX, EDX.
3718 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Calin Juravle7c4954d2014-10-28 16:57:40 +00003719 break;
3720 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003721 case DataType::Type::kFloat32:
3722 case DataType::Type::kFloat64: {
Calin Juravle7c4954d2014-10-28 16:57:40 +00003723 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003724 if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3725 DCHECK(div->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003726 } else if (div->InputAt(1)->IsConstant()) {
3727 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003728 } else {
3729 locations->SetInAt(1, Location::Any());
3730 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003731 locations->SetOut(Location::SameAsFirstInput());
3732 break;
3733 }
3734
3735 default:
3736 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3737 }
3738}
3739
3740void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
3741 LocationSummary* locations = div->GetLocations();
3742 Location first = locations->InAt(0);
3743 Location second = locations->InAt(1);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003744
3745 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003746 case DataType::Type::kInt32:
3747 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003748 GenerateDivRemIntegral(div);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003749 break;
3750 }
3751
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003752 case DataType::Type::kFloat32: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003753 if (second.IsFpuRegister()) {
3754 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3755 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3756 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003757 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003758 __ divss(first.AsFpuRegister<XmmRegister>(),
3759 codegen_->LiteralFloatAddress(
3760 const_area->GetConstant()->AsFloatConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003761 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003762 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3763 } else {
3764 DCHECK(second.IsStackSlot());
3765 __ divss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3766 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003767 break;
3768 }
3769
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003770 case DataType::Type::kFloat64: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003771 if (second.IsFpuRegister()) {
3772 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3773 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3774 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003775 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003776 __ divsd(first.AsFpuRegister<XmmRegister>(),
3777 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003778 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3779 const_area->GetBaseMethodAddress(),
3780 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003781 } else {
3782 DCHECK(second.IsDoubleStackSlot());
3783 __ divsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3784 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003785 break;
3786 }
3787
3788 default:
3789 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3790 }
3791}
3792
Calin Juravlebacfec32014-11-14 15:54:36 +00003793void LocationsBuilderX86::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003794 DataType::Type type = rem->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003795
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003796 LocationSummary::CallKind call_kind = (rem->GetResultType() == DataType::Type::kInt64)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003797 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003798 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01003799 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Calin Juravlebacfec32014-11-14 15:54:36 +00003800
Calin Juravled2ec87d2014-12-08 14:24:46 +00003801 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003802 case DataType::Type::kInt32: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003803 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003804 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003805 locations->SetOut(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003806 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3807 // which enforces results to be in EAX and EDX, things are simpler if we use EDX also as
3808 // output and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003809 if (rem->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003810 locations->AddTemp(Location::RequiresRegister());
3811 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003812 break;
3813 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003814 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003815 InvokeRuntimeCallingConvention calling_convention;
3816 locations->SetInAt(0, Location::RegisterPairLocation(
3817 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3818 locations->SetInAt(1, Location::RegisterPairLocation(
3819 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3820 // Runtime helper puts the result in EAX, EDX.
3821 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
3822 break;
3823 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003824 case DataType::Type::kFloat64:
3825 case DataType::Type::kFloat32: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003826 locations->SetInAt(0, Location::Any());
3827 locations->SetInAt(1, Location::Any());
3828 locations->SetOut(Location::RequiresFpuRegister());
3829 locations->AddTemp(Location::RegisterLocation(EAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003830 break;
3831 }
3832
3833 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003834 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003835 }
3836}
3837
3838void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003839 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00003840 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003841 case DataType::Type::kInt32:
3842 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003843 GenerateDivRemIntegral(rem);
3844 break;
3845 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003846 case DataType::Type::kFloat32:
3847 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003848 GenerateRemFP(rem);
Calin Juravlebacfec32014-11-14 15:54:36 +00003849 break;
3850 }
3851 default:
3852 LOG(FATAL) << "Unexpected rem type " << type;
3853 }
3854}
3855
Aart Bik1f8d51b2018-02-15 10:42:37 -08003856static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
3857 LocationSummary* locations = new (allocator) LocationSummary(minmax);
3858 switch (minmax->GetResultType()) {
3859 case DataType::Type::kInt32:
3860 locations->SetInAt(0, Location::RequiresRegister());
3861 locations->SetInAt(1, Location::RequiresRegister());
3862 locations->SetOut(Location::SameAsFirstInput());
3863 break;
3864 case DataType::Type::kInt64:
3865 locations->SetInAt(0, Location::RequiresRegister());
3866 locations->SetInAt(1, Location::RequiresRegister());
3867 locations->SetOut(Location::SameAsFirstInput());
3868 // Register to use to perform a long subtract to set cc.
3869 locations->AddTemp(Location::RequiresRegister());
3870 break;
3871 case DataType::Type::kFloat32:
3872 locations->SetInAt(0, Location::RequiresFpuRegister());
3873 locations->SetInAt(1, Location::RequiresFpuRegister());
3874 locations->SetOut(Location::SameAsFirstInput());
3875 locations->AddTemp(Location::RequiresRegister());
3876 break;
3877 case DataType::Type::kFloat64:
3878 locations->SetInAt(0, Location::RequiresFpuRegister());
3879 locations->SetInAt(1, Location::RequiresFpuRegister());
3880 locations->SetOut(Location::SameAsFirstInput());
3881 break;
3882 default:
3883 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
3884 }
3885}
3886
Aart Bik351df3e2018-03-07 11:54:57 -08003887void InstructionCodeGeneratorX86::GenerateMinMaxInt(LocationSummary* locations,
3888 bool is_min,
3889 DataType::Type type) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08003890 Location op1_loc = locations->InAt(0);
3891 Location op2_loc = locations->InAt(1);
3892
3893 // Shortcut for same input locations.
3894 if (op1_loc.Equals(op2_loc)) {
3895 // Can return immediately, as op1_loc == out_loc.
3896 // Note: if we ever support separate registers, e.g., output into memory, we need to check for
3897 // a copy here.
3898 DCHECK(locations->Out().Equals(op1_loc));
3899 return;
3900 }
3901
3902 if (type == DataType::Type::kInt64) {
3903 // Need to perform a subtract to get the sign right.
3904 // op1 is already in the same location as the output.
3905 Location output = locations->Out();
3906 Register output_lo = output.AsRegisterPairLow<Register>();
3907 Register output_hi = output.AsRegisterPairHigh<Register>();
3908
3909 Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
3910 Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();
3911
3912 // The comparison is performed by subtracting the second operand from
3913 // the first operand and then setting the status flags in the same
3914    // manner as the SUB instruction.
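    // x86-32 has no single instruction that compares two 64-bit register
    // pairs, so the cmpl below subtracts the low words to produce the borrow
    // and the sbbl into a scratch register completes the subtraction of the
    // high words; the resulting flags describe the full 64-bit signed
    // comparison without clobbering the output pair.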
3915 __ cmpl(output_lo, op2_lo);
3916
3917 // Now use a temp and the borrow to finish the subtraction of op2_hi.
3918 Register temp = locations->GetTemp(0).AsRegister<Register>();
3919 __ movl(temp, output_hi);
3920 __ sbbl(temp, op2_hi);
3921
3922 // Now the condition code is correct.
3923 Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
3924 __ cmovl(cond, output_lo, op2_lo);
3925 __ cmovl(cond, output_hi, op2_hi);
3926 } else {
3927 DCHECK_EQ(type, DataType::Type::kInt32);
3928 Register out = locations->Out().AsRegister<Register>();
3929 Register op2 = op2_loc.AsRegister<Register>();
3930
3931 // (out := op1)
3932 // out <=? op2
3933 // if out is min jmp done
3934 // out := op2
3935 // done:
3936
3937 __ cmpl(out, op2);
3938 Condition cond = is_min ? Condition::kGreater : Condition::kLess;
3939 __ cmovl(cond, out, op2);
3940 }
3941}
3942
3943void InstructionCodeGeneratorX86::GenerateMinMaxFP(LocationSummary* locations,
3944 bool is_min,
3945 DataType::Type type) {
3946 Location op1_loc = locations->InAt(0);
3947 Location op2_loc = locations->InAt(1);
3948 Location out_loc = locations->Out();
3949 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
3950
3951 // Shortcut for same input locations.
3952 if (op1_loc.Equals(op2_loc)) {
3953 DCHECK(out_loc.Equals(op1_loc));
3954 return;
3955 }
3956
3957 // (out := op1)
3958 // out <=? op2
3959 // if Nan jmp Nan_label
3960 // if out is min jmp done
3961 // if op2 is min jmp op2_label
3962 // handle -0/+0
3963 // jmp done
3964 // Nan_label:
3965 // out := NaN
3966 // op2_label:
3967 // out := op2
3968 // done:
3969 //
3970 // This removes one jmp, but needs to copy one input (op1) to out.
3971 //
3972 // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?
3973
3974 XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();
3975
3976 NearLabel nan, done, op2_label;
3977 if (type == DataType::Type::kFloat64) {
3978 __ ucomisd(out, op2);
3979 } else {
3980 DCHECK_EQ(type, DataType::Type::kFloat32);
3981 __ ucomiss(out, op2);
3982 }
3983
3984 __ j(Condition::kParityEven, &nan);
3985
3986 __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
3987 __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);
3988
3989 // Handle 0.0/-0.0.
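    // +0.0 and -0.0 compare equal under ucomiss/ucomisd, so neither ordering
    // branch above is taken for that pair. Working on the raw bit patterns
    // resolves it: OR keeps the sign bit if either operand is -0.0, giving
    // min(+0.0, -0.0) == -0.0, while AND clears it, giving
    // max(-0.0, +0.0) == +0.0, as Math.min/Math.max require.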
3990 if (is_min) {
3991 if (type == DataType::Type::kFloat64) {
3992 __ orpd(out, op2);
3993 } else {
3994 __ orps(out, op2);
3995 }
3996 } else {
3997 if (type == DataType::Type::kFloat64) {
3998 __ andpd(out, op2);
3999 } else {
4000 __ andps(out, op2);
4001 }
4002 }
4003 __ jmp(&done);
4004
4005 // NaN handling.
4006 __ Bind(&nan);
4007 if (type == DataType::Type::kFloat64) {
4008 // TODO: Use a constant from the constant table (requires extra input).
4009 __ LoadLongConstant(out, kDoubleNaN);
4010 } else {
4011 Register constant = locations->GetTemp(0).AsRegister<Register>();
4012 __ movl(constant, Immediate(kFloatNaN));
4013 __ movd(out, constant);
4014 }
4015 __ jmp(&done);
4016
4017 // out := op2;
4018 __ Bind(&op2_label);
4019 if (type == DataType::Type::kFloat64) {
4020 __ movsd(out, op2);
4021 } else {
4022 __ movss(out, op2);
4023 }
4024
4025 // Done.
4026 __ Bind(&done);
4027}
4028
Aart Bik351df3e2018-03-07 11:54:57 -08004029void InstructionCodeGeneratorX86::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4030 DataType::Type type = minmax->GetResultType();
4031 switch (type) {
4032 case DataType::Type::kInt32:
4033 case DataType::Type::kInt64:
4034 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4035 break;
4036 case DataType::Type::kFloat32:
4037 case DataType::Type::kFloat64:
4038 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4039 break;
4040 default:
4041 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4042 }
4043}
4044
Aart Bik1f8d51b2018-02-15 10:42:37 -08004045void LocationsBuilderX86::VisitMin(HMin* min) {
4046 CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
4047}
4048
4049void InstructionCodeGeneratorX86::VisitMin(HMin* min) {
Aart Bik351df3e2018-03-07 11:54:57 -08004050 GenerateMinMax(min, /*is_min*/ true);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004051}
4052
4053void LocationsBuilderX86::VisitMax(HMax* max) {
4054 CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
4055}
4056
4057void InstructionCodeGeneratorX86::VisitMax(HMax* max) {
Aart Bik351df3e2018-03-07 11:54:57 -08004058 GenerateMinMax(max, /*is_min*/ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004059}
4060
Aart Bik3dad3412018-02-28 12:01:46 -08004061void LocationsBuilderX86::VisitAbs(HAbs* abs) {
4062 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4063 switch (abs->GetResultType()) {
4064 case DataType::Type::kInt32:
4065 locations->SetInAt(0, Location::RegisterLocation(EAX));
4066 locations->SetOut(Location::SameAsFirstInput());
4067 locations->AddTemp(Location::RegisterLocation(EDX));
4068 break;
4069 case DataType::Type::kInt64:
4070 locations->SetInAt(0, Location::RequiresRegister());
4071 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
4072 locations->AddTemp(Location::RequiresRegister());
4073 break;
4074 case DataType::Type::kFloat32:
4075 locations->SetInAt(0, Location::RequiresFpuRegister());
4076 locations->SetOut(Location::SameAsFirstInput());
4077 locations->AddTemp(Location::RequiresFpuRegister());
4078 locations->AddTemp(Location::RequiresRegister());
4079 break;
4080 case DataType::Type::kFloat64:
4081 locations->SetInAt(0, Location::RequiresFpuRegister());
4082 locations->SetOut(Location::SameAsFirstInput());
4083 locations->AddTemp(Location::RequiresFpuRegister());
4084 break;
4085 default:
4086 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4087 }
4088}
4089
4090void InstructionCodeGeneratorX86::VisitAbs(HAbs* abs) {
4091 LocationSummary* locations = abs->GetLocations();
4092 switch (abs->GetResultType()) {
4093 case DataType::Type::kInt32: {
4094 Register out = locations->Out().AsRegister<Register>();
4095 DCHECK_EQ(out, EAX);
4096 Register temp = locations->GetTemp(0).AsRegister<Register>();
4097 DCHECK_EQ(temp, EDX);
4098 // Sign extend EAX into EDX.
4099 __ cdq();
4100 // XOR EAX with sign.
4101 __ xorl(EAX, EDX);
4102 // Subtract out sign to correct.
4103 __ subl(EAX, EDX);
4104 // The result is in EAX.
4105 break;
4106 }
4107 case DataType::Type::kInt64: {
4108 Location input = locations->InAt(0);
4109 Register input_lo = input.AsRegisterPairLow<Register>();
4110 Register input_hi = input.AsRegisterPairHigh<Register>();
4111 Location output = locations->Out();
4112 Register output_lo = output.AsRegisterPairLow<Register>();
4113 Register output_hi = output.AsRegisterPairHigh<Register>();
4114 Register temp = locations->GetTemp(0).AsRegister<Register>();
4115 // Compute the sign into the temporary.
4116 __ movl(temp, input_hi);
4117 __ sarl(temp, Immediate(31));
4118 // Store the sign into the output.
4119 __ movl(output_lo, temp);
4120 __ movl(output_hi, temp);
4121 // XOR the input into the output.
4122 __ xorl(output_lo, input_lo);
4123 __ xorl(output_hi, input_hi);
4124 // Subtract the sign.
4125 __ subl(output_lo, temp);
4126 __ sbbl(output_hi, temp);
4127 break;
4128 }
4129 case DataType::Type::kFloat32: {
4130 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
4131 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4132 Register constant = locations->GetTemp(1).AsRegister<Register>();
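// Clear the sign bit: build the 0x7FFFFFFF mask in a core register,
// move it into an XMM register, and AND it with the value.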
4133 __ movl(constant, Immediate(INT32_C(0x7FFFFFFF)));
4134 __ movd(temp, constant);
4135 __ andps(out, temp);
4136 break;
4137 }
4138 case DataType::Type::kFloat64: {
4139 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
4140 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
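// Clear the sign bit (bit 63) by ANDing with 0x7FFFFFFFFFFFFFFF.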
4141 // TODO: Use a constant from the constant table (requires extra input).
4142 __ LoadLongConstant(temp, INT64_C(0x7FFFFFFFFFFFFFFF));
4143 __ andpd(out, temp);
4144 break;
4145 }
4146 default:
4147 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4148 }
4149}
4150
Calin Juravled0d48522014-11-04 16:40:20 +00004151void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004152 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004153 switch (instruction->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004154 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004155 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004156 case DataType::Type::kInt8:
4157 case DataType::Type::kUint16:
4158 case DataType::Type::kInt16:
4159 case DataType::Type::kInt32: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004160 locations->SetInAt(0, Location::Any());
4161 break;
4162 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004163 case DataType::Type::kInt64: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004164 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
4165 if (!instruction->IsConstant()) {
4166 locations->AddTemp(Location::RequiresRegister());
4167 }
4168 break;
4169 }
4170 default:
4171 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
4172 }
Calin Juravled0d48522014-11-04 16:40:20 +00004173}
4174
4175void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004176 SlowPathCode* slow_path =
4177 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00004178 codegen_->AddSlowPath(slow_path);
4179
4180 LocationSummary* locations = instruction->GetLocations();
4181 Location value = locations->InAt(0);
4182
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004183 switch (instruction->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004184 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004185 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004186 case DataType::Type::kInt8:
4187 case DataType::Type::kUint16:
4188 case DataType::Type::kInt16:
4189 case DataType::Type::kInt32: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004190 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004191 __ testl(value.AsRegister<Register>(), value.AsRegister<Register>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004192 __ j(kEqual, slow_path->GetEntryLabel());
4193 } else if (value.IsStackSlot()) {
4194 __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
4195 __ j(kEqual, slow_path->GetEntryLabel());
4196 } else {
4197 DCHECK(value.IsConstant()) << value;
4198 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004199 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004200 }
4201 }
4202 break;
Calin Juravled0d48522014-11-04 16:40:20 +00004203 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004204 case DataType::Type::kInt64: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004205 if (value.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004206 Register temp = locations->GetTemp(0).AsRegister<Register>();
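// A 64-bit value is zero iff both halves are zero: OR them together and test.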
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004207 __ movl(temp, value.AsRegisterPairLow<Register>());
4208 __ orl(temp, value.AsRegisterPairHigh<Register>());
4209 __ j(kEqual, slow_path->GetEntryLabel());
4210 } else {
4211 DCHECK(value.IsConstant()) << value;
4212 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
4213 __ jmp(slow_path->GetEntryLabel());
4214 }
4215 }
4216 break;
4217 }
4218 default:
4219 LOG(FATAL) << "Unexpected type for HDivZeroCheck" << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00004220 }
Calin Juravled0d48522014-11-04 16:40:20 +00004221}
4222
Calin Juravle9aec02f2014-11-18 23:06:35 +00004223void LocationsBuilderX86::HandleShift(HBinaryOperation* op) {
4224 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4225
4226 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004227 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004228
4229 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004230 case DataType::Type::kInt32:
4231 case DataType::Type::kInt64: {
Mark P Mendell73945692015-04-29 14:56:17 +00004232 // Can't have Location::Any() for the input when the output is SameAsFirstInput().
Calin Juravle9aec02f2014-11-18 23:06:35 +00004233 locations->SetInAt(0, Location::RequiresRegister());
Mark P Mendell73945692015-04-29 14:56:17 +00004234 // The shift count needs to be in CL or a constant.
4235 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, op->InputAt(1)));
Calin Juravle9aec02f2014-11-18 23:06:35 +00004236 locations->SetOut(Location::SameAsFirstInput());
4237 break;
4238 }
4239 default:
4240 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
4241 }
4242}
4243
4244void InstructionCodeGeneratorX86::HandleShift(HBinaryOperation* op) {
4245 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4246
4247 LocationSummary* locations = op->GetLocations();
4248 Location first = locations->InAt(0);
4249 Location second = locations->InAt(1);
4250 DCHECK(first.Equals(locations->Out()));
4251
4252 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004253 case DataType::Type::kInt32: {
Mark P Mendell73945692015-04-29 14:56:17 +00004254 DCHECK(first.IsRegister());
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004255 Register first_reg = first.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004256 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004257 Register second_reg = second.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004258 DCHECK_EQ(ECX, second_reg);
4259 if (op->IsShl()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004260 __ shll(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004261 } else if (op->IsShr()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004262 __ sarl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004263 } else {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004264 __ shrl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004265 }
4266 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004267 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance;
Mark P Mendell73945692015-04-29 14:56:17 +00004268 if (shift == 0) {
4269 return;
4270 }
4271 Immediate imm(shift);
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004272 if (op->IsShl()) {
4273 __ shll(first_reg, imm);
4274 } else if (op->IsShr()) {
4275 __ sarl(first_reg, imm);
4276 } else {
4277 __ shrl(first_reg, imm);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004278 }
4279 }
4280 break;
4281 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004282 case DataType::Type::kInt64: {
Mark P Mendell73945692015-04-29 14:56:17 +00004283 if (second.IsRegister()) {
4284 Register second_reg = second.AsRegister<Register>();
4285 DCHECK_EQ(ECX, second_reg);
4286 if (op->IsShl()) {
4287 GenerateShlLong(first, second_reg);
4288 } else if (op->IsShr()) {
4289 GenerateShrLong(first, second_reg);
4290 } else {
4291 GenerateUShrLong(first, second_reg);
4292 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004293 } else {
Mark P Mendell73945692015-04-29 14:56:17 +00004294 // Shift by a constant.
Roland Levillain5b5b9312016-03-22 14:57:31 +00004295 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Mark P Mendell73945692015-04-29 14:56:17 +00004296 // Nothing to do if the shift is 0, as the input is already the output.
4297 if (shift != 0) {
4298 if (op->IsShl()) {
4299 GenerateShlLong(first, shift);
4300 } else if (op->IsShr()) {
4301 GenerateShrLong(first, shift);
4302 } else {
4303 GenerateUShrLong(first, shift);
4304 }
4305 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00004306 }
4307 break;
4308 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00004309 default:
4310 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
4311 }
4312}
4313
Mark P Mendell73945692015-04-29 14:56:17 +00004314void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, int shift) {
4315 Register low = loc.AsRegisterPairLow<Register>();
4316 Register high = loc.AsRegisterPairHigh<Register>();
Mark Mendellba56d062015-05-05 21:34:03 -04004317 if (shift == 1) {
4318 // This is just an addition.
4319 __ addl(low, low);
4320 __ adcl(high, high);
4321 } else if (shift == 32) {
Mark P Mendell73945692015-04-29 14:56:17 +00004322 // Shift by 32 is easy. High gets low, and low gets 0.
4323 codegen_->EmitParallelMoves(
4324 loc.ToLow(),
4325 loc.ToHigh(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004326 DataType::Type::kInt32,
Mark P Mendell73945692015-04-29 14:56:17 +00004327 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
4328 loc.ToLow(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004329 DataType::Type::kInt32);
Mark P Mendell73945692015-04-29 14:56:17 +00004330 } else if (shift > 32) {
4331 // Low part becomes 0. High part is low part << (shift-32).
4332 __ movl(high, low);
4333 __ shll(high, Immediate(shift - 32));
4334 __ xorl(low, low);
4335 } else {
4336 // Between 1 and 31.
4337 __ shld(high, low, Immediate(shift));
4338 __ shll(low, Immediate(shift));
4339 }
4340}
4341
Calin Juravle9aec02f2014-11-18 23:06:35 +00004342void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004343 NearLabel done;
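// Shift the pair left by the count in `shifter` (mod 64). shld/shll only use the low
// five bits of ECX, so for counts of 32-63 (bit 5 set) the result is fixed up below:
// the high word takes the shifted low word and the low word becomes 0.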
Calin Juravle9aec02f2014-11-18 23:06:35 +00004344 __ shld(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>(), shifter);
4345 __ shll(loc.AsRegisterPairLow<Register>(), shifter);
4346 __ testl(shifter, Immediate(32));
4347 __ j(kEqual, &done);
4348 __ movl(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>());
4349 __ movl(loc.AsRegisterPairLow<Register>(), Immediate(0));
4350 __ Bind(&done);
4351}
4352
Mark P Mendell73945692015-04-29 14:56:17 +00004353void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, int shift) {
4354 Register low = loc.AsRegisterPairLow<Register>();
4355 Register high = loc.AsRegisterPairHigh<Register>();
4356 if (shift == 32) {
4357 // Need to copy the sign.
4358 DCHECK_NE(low, high);
4359 __ movl(low, high);
4360 __ sarl(high, Immediate(31));
4361 } else if (shift > 32) {
4362 DCHECK_NE(low, high);
4363 // High part becomes sign. Low part is shifted by shift - 32.
4364 __ movl(low, high);
4365 __ sarl(high, Immediate(31));
4366 __ sarl(low, Immediate(shift - 32));
4367 } else {
4368 // Between 1 and 31.
4369 __ shrd(low, high, Immediate(shift));
4370 __ sarl(high, Immediate(shift));
4371 }
4372}
4373
Calin Juravle9aec02f2014-11-18 23:06:35 +00004374void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004375 NearLabel done;
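// Arithmetic right shift of the pair by the count in `shifter` (mod 64). For counts of
// 32-63 (bit 5 set) the fix-up below moves the high word into the low word and fills
// the high word with the sign.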
Calin Juravle9aec02f2014-11-18 23:06:35 +00004376 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
4377 __ sarl(loc.AsRegisterPairHigh<Register>(), shifter);
4378 __ testl(shifter, Immediate(32));
4379 __ j(kEqual, &done);
4380 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
4381 __ sarl(loc.AsRegisterPairHigh<Register>(), Immediate(31));
4382 __ Bind(&done);
4383}
4384
Mark P Mendell73945692015-04-29 14:56:17 +00004385void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, int shift) {
4386 Register low = loc.AsRegisterPairLow<Register>();
4387 Register high = loc.AsRegisterPairHigh<Register>();
4388 if (shift == 32) {
4389 // Shift by 32 is easy. Low gets high, and high gets 0.
4390 codegen_->EmitParallelMoves(
4391 loc.ToHigh(),
4392 loc.ToLow(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004393 DataType::Type::kInt32,
Mark P Mendell73945692015-04-29 14:56:17 +00004394 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
4395 loc.ToHigh(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004396 DataType::Type::kInt32);
Mark P Mendell73945692015-04-29 14:56:17 +00004397 } else if (shift > 32) {
4398 // Low part is high >> (shift - 32). High part becomes 0.
4399 __ movl(low, high);
4400 __ shrl(low, Immediate(shift - 32));
4401 __ xorl(high, high);
4402 } else {
4403 // Between 1 and 31.
4404 __ shrd(low, high, Immediate(shift));
4405 __ shrl(high, Immediate(shift));
4406 }
4407}
4408
Calin Juravle9aec02f2014-11-18 23:06:35 +00004409void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004410 NearLabel done;
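// Logical right shift of the pair by the count in `shifter` (mod 64). For counts of
// 32-63 (bit 5 set) the fix-up below moves the high word into the low word and clears
// the high word.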
Calin Juravle9aec02f2014-11-18 23:06:35 +00004411 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
4412 __ shrl(loc.AsRegisterPairHigh<Register>(), shifter);
4413 __ testl(shifter, Immediate(32));
4414 __ j(kEqual, &done);
4415 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
4416 __ movl(loc.AsRegisterPairHigh<Register>(), Immediate(0));
4417 __ Bind(&done);
4418}
4419
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004420void LocationsBuilderX86::VisitRor(HRor* ror) {
4421 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004422 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004423
4424 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004425 case DataType::Type::kInt64:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004426 // Add the temporary needed.
4427 locations->AddTemp(Location::RequiresRegister());
4428 FALLTHROUGH_INTENDED;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004429 case DataType::Type::kInt32:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004430 locations->SetInAt(0, Location::RequiresRegister());
4431 // The shift count needs to be in CL (unless it is a constant).
4432 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, ror->InputAt(1)));
4433 locations->SetOut(Location::SameAsFirstInput());
4434 break;
4435 default:
4436 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4437 UNREACHABLE();
4438 }
4439}
4440
4441void InstructionCodeGeneratorX86::VisitRor(HRor* ror) {
4442 LocationSummary* locations = ror->GetLocations();
4443 Location first = locations->InAt(0);
4444 Location second = locations->InAt(1);
4445
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004446 if (ror->GetResultType() == DataType::Type::kInt32) {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004447 Register first_reg = first.AsRegister<Register>();
4448 if (second.IsRegister()) {
4449 Register second_reg = second.AsRegister<Register>();
4450 __ rorl(first_reg, second_reg);
4451 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004452 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004453 __ rorl(first_reg, imm);
4454 }
4455 return;
4456 }
4457
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004458 DCHECK_EQ(ror->GetResultType(), DataType::Type::kInt64);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004459 Register first_reg_lo = first.AsRegisterPairLow<Register>();
4460 Register first_reg_hi = first.AsRegisterPairHigh<Register>();
4461 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
4462 if (second.IsRegister()) {
4463 Register second_reg = second.AsRegister<Register>();
4464 DCHECK_EQ(second_reg, ECX);
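// Rotate the 64-bit pair right by ECX (mod 64): double-shift both halves, then swap
// the halves with cmov when bit 5 of the count is set (rotation by 32-63).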
4465 __ movl(temp_reg, first_reg_hi);
4466 __ shrd(first_reg_hi, first_reg_lo, second_reg);
4467 __ shrd(first_reg_lo, temp_reg, second_reg);
4468 __ movl(temp_reg, first_reg_hi);
4469 __ testl(second_reg, Immediate(32));
4470 __ cmovl(kNotEqual, first_reg_hi, first_reg_lo);
4471 __ cmovl(kNotEqual, first_reg_lo, temp_reg);
4472 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004473 int32_t shift_amt = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004474 if (shift_amt == 0) {
4475 // Already fine.
4476 return;
4477 }
4478 if (shift_amt == 32) {
4479 // Just swap.
4480 __ movl(temp_reg, first_reg_lo);
4481 __ movl(first_reg_lo, first_reg_hi);
4482 __ movl(first_reg_hi, temp_reg);
4483 return;
4484 }
4485
4486 Immediate imm(shift_amt);
4487 // Save the constents of the low value.
4488 __ movl(temp_reg, first_reg_lo);
4489
4490 // Shift right into low, feeding bits from high.
4491 __ shrd(first_reg_lo, first_reg_hi, imm);
4492
4493 // Shift right into high, feeding bits from the original low.
4494 __ shrd(first_reg_hi, temp_reg, imm);
4495
4496 // Swap if needed.
4497 if (shift_amt > 32) {
4498 __ movl(temp_reg, first_reg_lo);
4499 __ movl(first_reg_lo, first_reg_hi);
4500 __ movl(first_reg_hi, temp_reg);
4501 }
4502 }
4503}
4504
Calin Juravle9aec02f2014-11-18 23:06:35 +00004505void LocationsBuilderX86::VisitShl(HShl* shl) {
4506 HandleShift(shl);
4507}
4508
4509void InstructionCodeGeneratorX86::VisitShl(HShl* shl) {
4510 HandleShift(shl);
4511}
4512
4513void LocationsBuilderX86::VisitShr(HShr* shr) {
4514 HandleShift(shr);
4515}
4516
4517void InstructionCodeGeneratorX86::VisitShr(HShr* shr) {
4518 HandleShift(shr);
4519}
4520
4521void LocationsBuilderX86::VisitUShr(HUShr* ushr) {
4522 HandleShift(ushr);
4523}
4524
4525void InstructionCodeGeneratorX86::VisitUShr(HUShr* ushr) {
4526 HandleShift(ushr);
4527}
4528
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004529void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004530 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4531 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004532 locations->SetOut(Location::RegisterLocation(EAX));
Alex Lightd109e302018-06-27 10:25:41 -07004533 InvokeRuntimeCallingConvention calling_convention;
4534 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004535}
4536
4537void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07004538 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
4539 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
4540 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004541}
4542
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004543void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004544 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4545 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004546 locations->SetOut(Location::RegisterLocation(EAX));
4547 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004548 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4549 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004550}
4551
4552void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
Vladimir Markob5461632018-10-15 14:24:21 +01004553 // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
4554 QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
Nicolas Geoffrayd0958442017-01-30 14:57:16 +00004555 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004556 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004557 DCHECK(!codegen_->IsLeafMethod());
4558}
4559
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004560void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004561 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004562 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004563 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4564 if (location.IsStackSlot()) {
4565 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4566 } else if (location.IsDoubleStackSlot()) {
4567 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004568 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004569 locations->SetOut(location);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004570}
4571
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004572void InstructionCodeGeneratorX86::VisitParameterValue(
4573 HParameterValue* instruction ATTRIBUTE_UNUSED) {
4574}
4575
4576void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) {
4577 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004578 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004579 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4580}
4581
4582void InstructionCodeGeneratorX86::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004583}
4584
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004585void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) {
4586 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004587 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004588 locations->SetInAt(0, Location::RequiresRegister());
4589 locations->SetOut(Location::RequiresRegister());
4590}
4591
4592void InstructionCodeGeneratorX86::VisitClassTableGet(HClassTableGet* instruction) {
4593 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00004594 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004595 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004596 instruction->GetIndex(), kX86PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004597 __ movl(locations->Out().AsRegister<Register>(),
4598 Address(locations->InAt(0).AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004599 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004600 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004601 instruction->GetIndex(), kX86PointerSize));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004602 __ movl(locations->Out().AsRegister<Register>(),
4603 Address(locations->InAt(0).AsRegister<Register>(),
4604 mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
4605 // out = out->GetImtEntryAt(method_offset);
4606 __ movl(locations->Out().AsRegister<Register>(),
4607 Address(locations->Out().AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004608 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004609}
4610
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004611void LocationsBuilderX86::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004612 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004613 new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004614 locations->SetInAt(0, Location::RequiresRegister());
4615 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004616}
4617
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004618void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
4619 LocationSummary* locations = not_->GetLocations();
Roland Levillain70566432014-10-24 16:20:17 +01004620 Location in = locations->InAt(0);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004621 Location out = locations->Out();
Roland Levillain70566432014-10-24 16:20:17 +01004622 DCHECK(in.Equals(out));
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004623 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004624 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004625 __ notl(out.AsRegister<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004626 break;
4627
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004628 case DataType::Type::kInt64:
Roland Levillain70566432014-10-24 16:20:17 +01004629 __ notl(out.AsRegisterPairLow<Register>());
4630 __ notl(out.AsRegisterPairHigh<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004631 break;
4632
4633 default:
4634 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4635 }
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004636}
4637
David Brazdil66d126e2015-04-03 16:02:44 +01004638void LocationsBuilderX86::VisitBooleanNot(HBooleanNot* bool_not) {
4639 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004640 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
David Brazdil66d126e2015-04-03 16:02:44 +01004641 locations->SetInAt(0, Location::RequiresRegister());
4642 locations->SetOut(Location::SameAsFirstInput());
4643}
4644
4645void InstructionCodeGeneratorX86::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004646 LocationSummary* locations = bool_not->GetLocations();
4647 Location in = locations->InAt(0);
4648 Location out = locations->Out();
4649 DCHECK(in.Equals(out));
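// A boolean is 0 or 1, so flipping the least significant bit negates it.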
4650 __ xorl(out.AsRegister<Register>(), Immediate(1));
4651}
4652
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004653void LocationsBuilderX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004654 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004655 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00004656 switch (compare->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004657 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004658 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004659 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004660 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004661 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004662 case DataType::Type::kInt32:
4663 case DataType::Type::kInt64: {
Calin Juravleddb7df22014-11-25 20:56:51 +00004664 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravleddb7df22014-11-25 20:56:51 +00004665 locations->SetInAt(1, Location::Any());
4666 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4667 break;
4668 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004669 case DataType::Type::kFloat32:
4670 case DataType::Type::kFloat64: {
Calin Juravleddb7df22014-11-25 20:56:51 +00004671 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004672 if (compare->InputAt(1)->IsX86LoadFromConstantTable()) {
4673 DCHECK(compare->InputAt(1)->IsEmittedAtUseSite());
4674 } else if (compare->InputAt(1)->IsConstant()) {
4675 locations->SetInAt(1, Location::RequiresFpuRegister());
4676 } else {
4677 locations->SetInAt(1, Location::Any());
4678 }
Calin Juravleddb7df22014-11-25 20:56:51 +00004679 locations->SetOut(Location::RequiresRegister());
4680 break;
4681 }
4682 default:
4683 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
4684 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004685}
4686
4687void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004688 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004689 Register out = locations->Out().AsRegister<Register>();
Calin Juravleddb7df22014-11-25 20:56:51 +00004690 Location left = locations->InAt(0);
4691 Location right = locations->InAt(1);
4692
Mark Mendell0c9497d2015-08-21 09:30:05 -04004693 NearLabel less, greater, done;
Aart Bika19616e2016-02-01 18:57:58 -08004694 Condition less_cond = kLess;
4695
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004696 switch (compare->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004697 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004698 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004699 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004700 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004701 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004702 case DataType::Type::kInt32: {
Roland Levillain0b671c02016-08-19 12:02:34 +01004703 codegen_->GenerateIntCompare(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08004704 break;
4705 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004706 case DataType::Type::kInt64: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004707 Register left_low = left.AsRegisterPairLow<Register>();
4708 Register left_high = left.AsRegisterPairHigh<Register>();
4709 int32_t val_low = 0;
4710 int32_t val_high = 0;
4711 bool right_is_const = false;
4712
4713 if (right.IsConstant()) {
4714 DCHECK(right.GetConstant()->IsLongConstant());
4715 right_is_const = true;
4716 int64_t val = right.GetConstant()->AsLongConstant()->GetValue();
4717 val_low = Low32Bits(val);
4718 val_high = High32Bits(val);
4719 }
4720
Calin Juravleddb7df22014-11-25 20:56:51 +00004721 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004722 __ cmpl(left_high, right.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004723 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004724 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004725 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004726 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004727 codegen_->Compare32BitValue(left_high, val_high);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004728 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004729 __ j(kLess, &less); // Signed compare.
4730 __ j(kGreater, &greater); // Signed compare.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004731 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004732 __ cmpl(left_low, right.AsRegisterPairLow<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004733 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004734 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004735 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004736 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004737 codegen_->Compare32BitValue(left_low, val_low);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004738 }
Aart Bika19616e2016-02-01 18:57:58 -08004739 less_cond = kBelow; // for CF (unsigned).
Calin Juravleddb7df22014-11-25 20:56:51 +00004740 break;
4741 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004742 case DataType::Type::kFloat32: {
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004743 GenerateFPCompare(left, right, compare, false);
Calin Juravleddb7df22014-11-25 20:56:51 +00004744 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08004745 less_cond = kBelow; // for CF (floats).
Calin Juravleddb7df22014-11-25 20:56:51 +00004746 break;
4747 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004748 case DataType::Type::kFloat64: {
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004749 GenerateFPCompare(left, right, compare, true);
Calin Juravleddb7df22014-11-25 20:56:51 +00004750 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08004751 less_cond = kBelow; // for CF (floats).
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004752 break;
4753 }
4754 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00004755 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004756 }
Aart Bika19616e2016-02-01 18:57:58 -08004757
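// Materialize the result: 0 if the operands compared equal, 1 for greater, -1 for less.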
Calin Juravleddb7df22014-11-25 20:56:51 +00004758 __ movl(out, Immediate(0));
4759 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08004760 __ j(less_cond, &less);
Calin Juravleddb7df22014-11-25 20:56:51 +00004761
4762 __ Bind(&greater);
4763 __ movl(out, Immediate(1));
4764 __ jmp(&done);
4765
4766 __ Bind(&less);
4767 __ movl(out, Immediate(-1));
4768
4769 __ Bind(&done);
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004770}
4771
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004772void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004773 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004774 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004775 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01004776 locations->SetInAt(i, Location::Any());
4777 }
4778 locations->SetOut(Location::Any());
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004779}
4780
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004781void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01004782 LOG(FATAL) << "Unreachable";
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004783}
4784
Roland Levillain7c1559a2015-12-15 10:55:36 +00004785void CodeGeneratorX86::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004786 /*
4787 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
4788 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86 memory model.
4789 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4790 */
4791 switch (kind) {
4792 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004793 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004794 break;
4795 }
4796 case MemBarrierKind::kAnyStore:
4797 case MemBarrierKind::kLoadAny:
4798 case MemBarrierKind::kStoreStore: {
4799 // nop
4800 break;
4801 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004802 case MemBarrierKind::kNTStoreStore:
4803 // Non-Temporal Store/Store needs an explicit fence.
Andreas Gampe3db70682018-12-26 15:12:03 -08004804 MemoryFence(/* non-temporal= */ true);
Mark Mendell7aa04a12016-01-27 22:39:07 -05004805 break;
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004806 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004807}
4808
Vladimir Markodc151b22015-10-15 18:02:30 +01004809HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch(
4810 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffraybdb2ecc2018-09-18 14:33:55 +01004811 ArtMethod* method ATTRIBUTE_UNUSED) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004812 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01004813}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004814
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004815Register CodeGeneratorX86::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
4816 Register temp) {
4817 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
Vladimir Markoc53c0792015-11-19 15:48:33 +00004818 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004819 if (!invoke->GetLocations()->Intrinsified()) {
4820 return location.AsRegister<Register>();
4821 }
4822 // For intrinsics we allow any location, so it may be on the stack.
4823 if (!location.IsRegister()) {
4824 __ movl(temp, Address(ESP, location.GetStackIndex()));
4825 return temp;
4826 }
4827 // For register locations, check if the register was saved. If so, get it from the stack.
4828 // Note: There is a chance that the register was saved but not overwritten, so we could
4829 // save one load. However, since this is just an intrinsic slow path we prefer this
4830 // simple and more robust approach rather than trying to determine if that's the case.
4831 SlowPathCode* slow_path = GetCurrentSlowPath();
Vladimir Marko4ee8e292017-06-02 15:39:30 +00004832 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
4833 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
4834 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
4835 __ movl(temp, Address(ESP, stack_offset));
4836 return temp;
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004837 }
4838 return location.AsRegister<Register>();
4839}
4840
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004841void CodeGeneratorX86::GenerateStaticOrDirectCall(
4842 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Vladimir Marko58155012015-08-19 12:49:41 +00004843 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4844 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004845 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00004846 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004847 uint32_t offset =
4848 GetThreadOffset<kX86PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
4849 __ fs()->movl(temp.AsRegister<Register>(), Address::Absolute(offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004850 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004851 }
Vladimir Marko58155012015-08-19 12:49:41 +00004852 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004853 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004854 break;
Vladimir Marko65979462017-05-19 17:25:12 +01004855 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
4856 DCHECK(GetCompilerOptions().IsBootImage());
4857 Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
4858 temp.AsRegister<Register>());
4859 __ leal(temp.AsRegister<Register>(), Address(base_reg, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004860 RecordBootImageMethodPatch(invoke);
Vladimir Marko65979462017-05-19 17:25:12 +01004861 break;
4862 }
Vladimir Markob066d432018-01-03 13:14:37 +00004863 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
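// Load the ArtMethod* from the .data.bimg.rel.ro entry; kDummy32BitOffset is a
// placeholder displacement fixed up via the patch recorded below.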
4864 Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
4865 temp.AsRegister<Register>());
4866 __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
4867 RecordBootImageRelRoPatch(
4868 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress(),
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004869 GetBootImageOffset(invoke));
Vladimir Markob066d432018-01-03 13:14:37 +00004870 break;
4871 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004872 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004873 Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
4874 temp.AsRegister<Register>());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004875 __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004876 RecordMethodBssEntryPatch(invoke);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01004877 // No need for memory fence, thanks to the x86 memory model.
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004878 break;
4879 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004880 case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
4881 __ movl(temp.AsRegister<Register>(), Immediate(invoke->GetMethodAddress()));
4882 break;
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004883 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
4884 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
4885 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko9b688a02015-05-06 14:12:42 +01004886 }
Vladimir Marko58155012015-08-19 12:49:41 +00004887 }
4888
4889 switch (invoke->GetCodePtrLocation()) {
4890 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
4891 __ call(GetFrameEntryLabel());
4892 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004893 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4894 // (callee_method + offset_of_quick_compiled_code)()
4895 __ call(Address(callee_method.AsRegister<Register>(),
4896 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07004897 kX86PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00004898 break;
Mark Mendell09ed1a32015-03-25 08:30:06 -04004899 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004900 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Mark Mendell09ed1a32015-03-25 08:30:06 -04004901
4902 DCHECK(!IsLeafMethod());
Mark Mendell09ed1a32015-03-25 08:30:06 -04004903}
4904
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004905void CodeGeneratorX86::GenerateVirtualCall(
4906 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004907 Register temp = temp_in.AsRegister<Register>();
4908 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4909 invoke->GetVTableIndex(), kX86PointerSize).Uint32Value();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004910
4911 // Use the calling convention instead of the location of the receiver, as
4912 // intrinsics may have put the receiver in a different register. In the intrinsics
4913 // slow path, the arguments have been moved to the right place, so here we are
4914 // guaranteed that the receiver is the first register of the calling convention.
4915 InvokeDexCallingConvention calling_convention;
4916 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004917 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004918 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004919 __ movl(temp, Address(receiver, class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004920 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004921 // Instead of simply (possibly) unpoisoning `temp` here, we should
4922 // emit a read barrier for the previous class reference load.
4923 // However this is not required in practice, as this is an
4924 // intermediate/temporary reference and because the current
4925 // concurrent copying collector keeps the from-space memory
4926 // intact/accessible until the end of the marking phase (the
4927 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004928 __ MaybeUnpoisonHeapReference(temp);
4929 // temp = temp->GetMethodAt(method_offset);
4930 __ movl(temp, Address(temp, method_offset));
4931 // call temp->GetEntryPoint();
4932 __ call(Address(
Andreas Gampe542451c2016-07-26 09:02:02 -07004933 temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004934 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004935}
4936
Vladimir Marko6fd16062018-06-26 11:02:04 +01004937void CodeGeneratorX86::RecordBootImageIntrinsicPatch(HX86ComputeBaseMethodAddress* method_address,
4938 uint32_t intrinsic_data) {
4939 boot_image_intrinsic_patches_.emplace_back(
Andreas Gampe3db70682018-12-26 15:12:03 -08004940 method_address, /* target_dex_file= */ nullptr, intrinsic_data);
Vladimir Marko6fd16062018-06-26 11:02:04 +01004941 __ Bind(&boot_image_intrinsic_patches_.back().label);
4942}
4943
Vladimir Markob066d432018-01-03 13:14:37 +00004944void CodeGeneratorX86::RecordBootImageRelRoPatch(HX86ComputeBaseMethodAddress* method_address,
4945 uint32_t boot_image_offset) {
4946 boot_image_method_patches_.emplace_back(
Andreas Gampe3db70682018-12-26 15:12:03 -08004947 method_address, /* target_dex_file= */ nullptr, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00004948 __ Bind(&boot_image_method_patches_.back().label);
4949}
4950
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004951void CodeGeneratorX86::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
Vladimir Marko65979462017-05-19 17:25:12 +01004952 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004953 HX86ComputeBaseMethodAddress* method_address =
Vladimir Marko65979462017-05-19 17:25:12 +01004954 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004955 boot_image_method_patches_.emplace_back(
4956 method_address, invoke->GetTargetMethod().dex_file, invoke->GetTargetMethod().index);
Vladimir Marko65979462017-05-19 17:25:12 +01004957 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004958}
4959
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004960void CodeGeneratorX86::RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke) {
4961 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
4962 HX86ComputeBaseMethodAddress* method_address =
4963 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004964 // Add the patch entry and bind its label at the end of the instruction.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004965 method_bss_entry_patches_.emplace_back(
4966 method_address, &GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
4967 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004968}
4969
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004970void CodeGeneratorX86::RecordBootImageTypePatch(HLoadClass* load_class) {
4971 HX86ComputeBaseMethodAddress* method_address =
4972 load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
4973 boot_image_type_patches_.emplace_back(
4974 method_address, &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004975 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004976}
4977
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004978Label* CodeGeneratorX86::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004979 HX86ComputeBaseMethodAddress* method_address =
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004980 load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
4981 type_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004982 method_address, &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004983 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004984}
4985
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004986void CodeGeneratorX86::RecordBootImageStringPatch(HLoadString* load_string) {
4987 HX86ComputeBaseMethodAddress* method_address =
4988 load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
4989 boot_image_string_patches_.emplace_back(
4990 method_address, &load_string->GetDexFile(), load_string->GetStringIndex().index_);
4991 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01004992}
4993
Vladimir Markoaad75c62016-10-03 08:46:48 +00004994Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004995 HX86ComputeBaseMethodAddress* method_address =
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004996 load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004997 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004998 method_address, &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004999 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00005000}
5001
Vladimir Markoeebb8212018-06-05 14:57:24 +01005002void CodeGeneratorX86::LoadBootImageAddress(Register reg,
Vladimir Marko6fd16062018-06-26 11:02:04 +01005003 uint32_t boot_image_reference,
Vladimir Markoeebb8212018-06-05 14:57:24 +01005004 HInvokeStaticOrDirect* invoke) {
Vladimir Marko6fd16062018-06-26 11:02:04 +01005005 if (GetCompilerOptions().IsBootImage()) {
5006 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5007 HX86ComputeBaseMethodAddress* method_address =
5008 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
5009 DCHECK(method_address != nullptr);
5010 Register method_address_reg =
5011 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
5012 __ leal(reg, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
5013 RecordBootImageIntrinsicPatch(method_address, boot_image_reference);
Vladimir Markoa2da9b92018-10-10 14:21:55 +01005014 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01005015 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5016 HX86ComputeBaseMethodAddress* method_address =
5017 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
5018 DCHECK(method_address != nullptr);
5019 Register method_address_reg =
5020 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
5021 __ movl(reg, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko6fd16062018-06-26 11:02:04 +01005022 RecordBootImageRelRoPatch(method_address, boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01005023 } else {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005024 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markoeebb8212018-06-05 14:57:24 +01005025 gc::Heap* heap = Runtime::Current()->GetHeap();
5026 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01005027 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01005028 __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
5029 }
5030}
5031
Vladimir Marko6fd16062018-06-26 11:02:04 +01005032void CodeGeneratorX86::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
5033 uint32_t boot_image_offset) {
5034 DCHECK(invoke->IsStatic());
5035 InvokeRuntimeCallingConvention calling_convention;
5036 Register argument = calling_convention.GetRegisterAt(0);
5037 if (GetCompilerOptions().IsBootImage()) {
5038 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
5039 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
5040 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
5041 HX86ComputeBaseMethodAddress* method_address =
5042 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
5043 DCHECK(method_address != nullptr);
5044 Register method_address_reg =
5045 invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
5046 __ leal(argument, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
5047 MethodReference target_method = invoke->GetTargetMethod();
5048 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
5049 boot_image_type_patches_.emplace_back(method_address, target_method.dex_file, type_idx.index_);
5050 __ Bind(&boot_image_type_patches_.back().label);
5051 } else {
5052 LoadBootImageAddress(argument, boot_image_offset, invoke);
5053 }
5054 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
5055 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
5056}
5057
Vladimir Markoaad75c62016-10-03 08:46:48 +00005058// The label points to the end of the "movl" or another instruction but the literal offset
5059// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
5060constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
5061
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005062template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00005063inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005064 const ArenaDeque<X86PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005065 ArenaVector<linker::LinkerPatch>* linker_patches) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00005066 for (const X86PcRelativePatchInfo& info : infos) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005067 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005068 linker_patches->push_back(Factory(literal_offset,
5069 info.target_dex_file,
5070 GetMethodAddressOffset(info.method_address),
5071 info.offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00005072 }
5073}
5074
Vladimir Marko6fd16062018-06-26 11:02:04 +01005075template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
5076linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
5077 const DexFile* target_dex_file,
5078 uint32_t pc_insn_offset,
5079 uint32_t boot_image_offset) {
5080 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
5081 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00005082}
5083
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005084void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00005085 DCHECK(linker_patches->empty());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00005086 size_t size =
Vladimir Marko65979462017-05-19 17:25:12 +01005087 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005088 method_bss_entry_patches_.size() +
Vladimir Marko1998cd02017-01-13 13:02:58 +00005089 boot_image_type_patches_.size() +
Vladimir Marko65979462017-05-19 17:25:12 +01005090 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005091 boot_image_string_patches_.size() +
Vladimir Marko6fd16062018-06-26 11:02:04 +01005092 string_bss_entry_patches_.size() +
5093 boot_image_intrinsic_patches_.size();
Vladimir Marko0f7dca42015-11-02 14:36:43 +00005094 linker_patches->reserve(size);
Vladimir Marko764d4542017-05-16 10:31:41 +01005095 if (GetCompilerOptions().IsBootImage()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005096 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
5097 boot_image_method_patches_, linker_patches);
5098 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
5099 boot_image_type_patches_, linker_patches);
5100 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005101 boot_image_string_patches_, linker_patches);
Vladimir Marko6fd16062018-06-26 11:02:04 +01005102 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
5103 boot_image_intrinsic_patches_, linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005104 } else {
Vladimir Marko6fd16062018-06-26 11:02:04 +01005105 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
Vladimir Markob066d432018-01-03 13:14:37 +00005106 boot_image_method_patches_, linker_patches);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005107 DCHECK(boot_image_type_patches_.empty());
5108 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01005109 DCHECK(boot_image_intrinsic_patches_.empty());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005110 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01005111 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
5112 method_bss_entry_patches_, linker_patches);
5113 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
5114 type_bss_entry_patches_, linker_patches);
5115 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
5116 string_bss_entry_patches_, linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00005117 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00005118}
5119
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005120void CodeGeneratorX86::MarkGCCard(Register temp,
5121 Register card,
5122 Register object,
5123 Register value,
5124 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005125 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005126 if (value_can_be_null) {
5127 __ testl(value, value);
5128 __ j(kEqual, &is_null);
5129 }
Roland Levillainc73f0522018-08-14 15:16:50 +01005130 // Load the address of the card table into `card`.
Andreas Gampe542451c2016-07-26 09:02:02 -07005131 __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86PointerSize>().Int32Value()));
Roland Levillainc73f0522018-08-14 15:16:50 +01005132 // Calculate the offset (in the card table) of the card corresponding to
5133 // `object`.
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005134 __ movl(temp, object);
5135 __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillainc73f0522018-08-14 15:16:50 +01005136 // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
5137 // `object`'s card.
5138 //
5139 // Register `card` contains the address of the card table. Note that the card
5140 // table's base is biased during its creation so that it always starts at an
5141 // address whose least-significant byte is equal to `kCardDirty` (see
5142 // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
5143 // below writes the `kCardDirty` (byte) value into the `object`'s card
5144 // (located at `card + (object >> kCardShift)`).
5145 //
5146 // This dual use of the value in register `card` (1. to calculate the location
5147 // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
5148 // (no need to explicitly load `kCardDirty` as an immediate value).
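// Illustrative sketch of the effect (pseudo-C, not emitted code; the shift amount and
// dirty value come from art::gc::accounting::CardTable):
//   card_table_biased_base[object >> kCardShift] = LowByte(card_table_biased_base);
// where LowByte(card_table_biased_base) == kCardDirty thanks to the biasing described above.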
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00005149 __ movb(Address(temp, card, TIMES_1, 0),
5150 X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005151 if (value_can_be_null) {
5152 __ Bind(&is_null);
5153 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005154}
5155
Calin Juravle52c48962014-12-16 17:02:57 +00005156void LocationsBuilderX86::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
5157 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005158
5159 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005160 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005161 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005162 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5163 kEmitCompilerReadBarrier
5164 ? LocationSummary::kCallOnSlowPath
5165 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005166 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005167 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005168 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005169 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005170
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005171 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005172 locations->SetOut(Location::RequiresFpuRegister());
5173 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005174 // The output overlaps in case of long: we don't want the low move
5175 // to overwrite the object's location. Likewise, in the case of
5176 // an object field get with read barriers enabled, we do not want
5177 // the move to overwrite the object's location, as we need it to emit
5178 // the read barrier.
5179 locations->SetOut(
5180 Location::RequiresRegister(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005181 (object_field_get_with_read_barrier || instruction->GetType() == DataType::Type::kInt64) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00005182 Location::kOutputOverlap :
5183 Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005184 }
Calin Juravle52c48962014-12-16 17:02:57 +00005185
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005186 if (field_info.IsVolatile() && (field_info.GetFieldType() == DataType::Type::kInt64)) {
Calin Juravle52c48962014-12-16 17:02:57 +00005187 // Long values can be loaded atomically into an XMM using movsd.
Roland Levillain7c1559a2015-12-15 10:55:36 +00005188 // So we use an XMM register as a temp to achieve atomicity (first
5189 // load the value into the XMM temp and then copy the XMM into the
5190 // output, 32 bits at a time).
Calin Juravle52c48962014-12-16 17:02:57 +00005191 locations->AddTemp(Location::RequiresFpuRegister());
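// Rough sketch of the sequence emitted for this case in HandleFieldGet below
// (register names are illustrative):
//   movsd xmm_temp, [base + offset]   ; single atomic 64-bit load
//   movd  out_lo, xmm_temp
//   psrlq xmm_temp, 32
//   movd  out_hi, xmm_temp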
5192 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005193}
5194
Calin Juravle52c48962014-12-16 17:02:57 +00005195void InstructionCodeGeneratorX86::HandleFieldGet(HInstruction* instruction,
5196 const FieldInfo& field_info) {
5197 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005198
Calin Juravle52c48962014-12-16 17:02:57 +00005199 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005200 Location base_loc = locations->InAt(0);
5201 Register base = base_loc.AsRegister<Register>();
Calin Juravle52c48962014-12-16 17:02:57 +00005202 Location out = locations->Out();
5203 bool is_volatile = field_info.IsVolatile();
Vladimir Marko61b92282017-10-11 13:23:17 +01005204 DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
5205 DataType::Type load_type = instruction->GetType();
Calin Juravle52c48962014-12-16 17:02:57 +00005206 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
5207
Vladimir Marko61b92282017-10-11 13:23:17 +01005208 switch (load_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005209 case DataType::Type::kBool:
5210 case DataType::Type::kUint8: {
Calin Juravle52c48962014-12-16 17:02:57 +00005211 __ movzxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005212 break;
5213 }
5214
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005215 case DataType::Type::kInt8: {
Calin Juravle52c48962014-12-16 17:02:57 +00005216 __ movsxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005217 break;
5218 }
5219
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005220 case DataType::Type::kUint16: {
5221 __ movzxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005222 break;
5223 }
5224
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005225 case DataType::Type::kInt16: {
5226 __ movsxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005227 break;
5228 }
5229
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005230 case DataType::Type::kInt32:
Calin Juravle52c48962014-12-16 17:02:57 +00005231 __ movl(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005232 break;
Roland Levillain7c1559a2015-12-15 10:55:36 +00005233
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005234 case DataType::Type::kReference: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005235 // /* HeapReference<Object> */ out = *(base + offset)
5236 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005237 // Note that a potential implicit null check is handled in this
5238 // CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier call.
5239 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08005240 instruction, out, base, offset, /* needs_null_check= */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005241 if (is_volatile) {
5242 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5243 }
5244 } else {
5245 __ movl(out.AsRegister<Register>(), Address(base, offset));
5246 codegen_->MaybeRecordImplicitNullCheck(instruction);
5247 if (is_volatile) {
5248 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5249 }
5250 // If read barriers are enabled, emit read barriers other than
5251 // Baker's using a slow path (and also unpoison the loaded
5252 // reference, if heap poisoning is enabled).
5253 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
5254 }
5255 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005256 }
5257
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005258 case DataType::Type::kInt64: {
Calin Juravle52c48962014-12-16 17:02:57 +00005259 if (is_volatile) {
5260 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
5261 __ movsd(temp, Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005262 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005263 __ movd(out.AsRegisterPairLow<Register>(), temp);
5264 __ psrlq(temp, Immediate(32));
5265 __ movd(out.AsRegisterPairHigh<Register>(), temp);
5266 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005267 DCHECK_NE(base, out.AsRegisterPairLow<Register>());
Calin Juravle52c48962014-12-16 17:02:57 +00005268 __ movl(out.AsRegisterPairLow<Register>(), Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005269 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005270 __ movl(out.AsRegisterPairHigh<Register>(), Address(base, kX86WordSize + offset));
5271 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005272 break;
5273 }
5274
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005275 case DataType::Type::kFloat32: {
Calin Juravle52c48962014-12-16 17:02:57 +00005276 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005277 break;
5278 }
5279
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005280 case DataType::Type::kFloat64: {
Calin Juravle52c48962014-12-16 17:02:57 +00005281 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005282 break;
5283 }
5284
Aart Bik66c158e2018-01-31 12:55:04 -08005285 case DataType::Type::kUint32:
5286 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005287 case DataType::Type::kVoid:
Vladimir Marko61b92282017-10-11 13:23:17 +01005288 LOG(FATAL) << "Unreachable type " << load_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005289 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005290 }
Calin Juravle52c48962014-12-16 17:02:57 +00005291
Vladimir Marko61b92282017-10-11 13:23:17 +01005292 if (load_type == DataType::Type::kReference || load_type == DataType::Type::kInt64) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005293 // Potential implicit null checks, in the case of reference or
5294 // long fields, are handled in the previous switch statement.
5295 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005296 codegen_->MaybeRecordImplicitNullCheck(instruction);
5297 }
5298
Calin Juravle52c48962014-12-16 17:02:57 +00005299 if (is_volatile) {
Vladimir Marko61b92282017-10-11 13:23:17 +01005300 if (load_type == DataType::Type::kReference) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005301 // Memory barriers, in the case of references, are also handled
5302 // in the previous switch statement.
5303 } else {
5304 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5305 }
Roland Levillain4d027112015-07-01 15:41:14 +01005306 }
Calin Juravle52c48962014-12-16 17:02:57 +00005307}
5308
5309void LocationsBuilderX86::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
5310 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5311
5312 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005313 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00005314 locations->SetInAt(0, Location::RequiresRegister());
5315 bool is_volatile = field_info.IsVolatile();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005316 DataType::Type field_type = field_info.GetFieldType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005317 bool is_byte_type = DataType::Size(field_type) == 1u;
Calin Juravle52c48962014-12-16 17:02:57 +00005318
5319 // The register allocator does not support multiple
5320 // inputs that die at entry when one of them must be in a specific register.
5321 if (is_byte_type) {
5322 // Ensure the value is in a byte register.
5323 locations->SetInAt(1, Location::RegisterLocation(EAX));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005324 } else if (DataType::IsFloatingPointType(field_type)) {
5325 if (is_volatile && field_type == DataType::Type::kFloat64) {
Mark Mendell81489372015-11-04 11:30:41 -05005326 // In order to satisfy the semantics of volatile, this must be a single instruction store.
5327 locations->SetInAt(1, Location::RequiresFpuRegister());
5328 } else {
5329 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
5330 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005331 } else if (is_volatile && field_type == DataType::Type::kInt64) {
Mark Mendell81489372015-11-04 11:30:41 -05005332 // In order to satisfy the semantics of volatile, this must be a single instruction store.
Calin Juravle52c48962014-12-16 17:02:57 +00005333 locations->SetInAt(1, Location::RequiresRegister());
Mark Mendell81489372015-11-04 11:30:41 -05005334
Calin Juravle52c48962014-12-16 17:02:57 +00005335 // A 64-bit value can be atomically written to an address with movsd and an XMM register.
5336 // We need two XMM registers because there's no easier way to (bit) copy a register pair
5337 // into a single XMM register (we copy each half of the pair into an XMM and then interleave them).
5338 // NB: We could make the register allocator understand fp_reg <-> core_reg moves, but given the
5339 // isolated cases where we need this it isn't worth adding the extra complexity.
5340 locations->AddTemp(Location::RequiresFpuRegister());
5341 locations->AddTemp(Location::RequiresFpuRegister());
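// Rough sketch of the sequence emitted for this case in HandleFieldSet below
// (register names are illustrative):
//   movd      xmm_temp1, value_lo
//   movd      xmm_temp2, value_hi
//   punpckldq xmm_temp1, xmm_temp2    ; low 64 bits become value_hi:value_lo
//   movsd     [base + offset], xmm_temp1   ; single atomic 64-bit store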
Mark Mendell81489372015-11-04 11:30:41 -05005342 } else {
5343 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5344
5345 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
5346 // Temporary registers for the write barrier.
5347 locations->AddTemp(Location::RequiresRegister()); // May be used for reference poisoning too.
5348 // Ensure the card is in a byte register.
5349 locations->AddTemp(Location::RegisterLocation(ECX));
5350 }
Calin Juravle52c48962014-12-16 17:02:57 +00005351 }
5352}
5353
5354void InstructionCodeGeneratorX86::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005355 const FieldInfo& field_info,
5356 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00005357 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5358
5359 LocationSummary* locations = instruction->GetLocations();
5360 Register base = locations->InAt(0).AsRegister<Register>();
5361 Location value = locations->InAt(1);
5362 bool is_volatile = field_info.IsVolatile();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005363 DataType::Type field_type = field_info.GetFieldType();
Calin Juravle52c48962014-12-16 17:02:57 +00005364 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01005365 bool needs_write_barrier =
5366 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00005367
5368 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005369 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00005370 }
5371
Mark Mendell81489372015-11-04 11:30:41 -05005372 bool maybe_record_implicit_null_check_done = false;
5373
Calin Juravle52c48962014-12-16 17:02:57 +00005374 switch (field_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005375 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005376 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005377 case DataType::Type::kInt8: {
Calin Juravle52c48962014-12-16 17:02:57 +00005378 __ movb(Address(base, offset), value.AsRegister<ByteRegister>());
5379 break;
5380 }
5381
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005382 case DataType::Type::kUint16:
5383 case DataType::Type::kInt16: {
Mark Mendell81489372015-11-04 11:30:41 -05005384 if (value.IsConstant()) {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005385 __ movw(Address(base, offset),
5386 Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Mark Mendell81489372015-11-04 11:30:41 -05005387 } else {
5388 __ movw(Address(base, offset), value.AsRegister<Register>());
5389 }
Calin Juravle52c48962014-12-16 17:02:57 +00005390 break;
5391 }
5392
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005393 case DataType::Type::kInt32:
5394 case DataType::Type::kReference: {
Roland Levillain4d027112015-07-01 15:41:14 +01005395 if (kPoisonHeapReferences && needs_write_barrier) {
5396 // Note that in the case where `value` is a null reference,
5397 // we do not enter this block, as the reference does not
5398 // need poisoning.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005399 DCHECK_EQ(field_type, DataType::Type::kReference);
Roland Levillain4d027112015-07-01 15:41:14 +01005400 Register temp = locations->GetTemp(0).AsRegister<Register>();
5401 __ movl(temp, value.AsRegister<Register>());
5402 __ PoisonHeapReference(temp);
5403 __ movl(Address(base, offset), temp);
Mark Mendell81489372015-11-04 11:30:41 -05005404 } else if (value.IsConstant()) {
5405 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5406 __ movl(Address(base, offset), Immediate(v));
Roland Levillain4d027112015-07-01 15:41:14 +01005407 } else {
Nicolas Geoffray03971632016-03-17 10:44:24 +00005408 DCHECK(value.IsRegister()) << value;
Roland Levillain4d027112015-07-01 15:41:14 +01005409 __ movl(Address(base, offset), value.AsRegister<Register>());
5410 }
Calin Juravle52c48962014-12-16 17:02:57 +00005411 break;
5412 }
5413
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005414 case DataType::Type::kInt64: {
Calin Juravle52c48962014-12-16 17:02:57 +00005415 if (is_volatile) {
5416 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
5417 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
5418 __ movd(temp1, value.AsRegisterPairLow<Register>());
5419 __ movd(temp2, value.AsRegisterPairHigh<Register>());
5420 __ punpckldq(temp1, temp2);
5421 __ movsd(Address(base, offset), temp1);
Calin Juravle77520bc2015-01-12 18:45:46 +00005422 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell81489372015-11-04 11:30:41 -05005423 } else if (value.IsConstant()) {
5424 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
5425 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
5426 codegen_->MaybeRecordImplicitNullCheck(instruction);
5427 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
Calin Juravle52c48962014-12-16 17:02:57 +00005428 } else {
5429 __ movl(Address(base, offset), value.AsRegisterPairLow<Register>());
Calin Juravle77520bc2015-01-12 18:45:46 +00005430 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005431 __ movl(Address(base, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
5432 }
Mark Mendell81489372015-11-04 11:30:41 -05005433 maybe_record_implicit_null_check_done = true;
Calin Juravle52c48962014-12-16 17:02:57 +00005434 break;
5435 }
5436
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005437 case DataType::Type::kFloat32: {
Mark Mendell81489372015-11-04 11:30:41 -05005438 if (value.IsConstant()) {
5439 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5440 __ movl(Address(base, offset), Immediate(v));
5441 } else {
5442 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5443 }
Calin Juravle52c48962014-12-16 17:02:57 +00005444 break;
5445 }
5446
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005447 case DataType::Type::kFloat64: {
Mark Mendell81489372015-11-04 11:30:41 -05005448 if (value.IsConstant()) {
5449 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
5450 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
5451 codegen_->MaybeRecordImplicitNullCheck(instruction);
5452 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
5453 maybe_record_implicit_null_check_done = true;
5454 } else {
5455 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5456 }
Calin Juravle52c48962014-12-16 17:02:57 +00005457 break;
5458 }
5459
Aart Bik66c158e2018-01-31 12:55:04 -08005460 case DataType::Type::kUint32:
5461 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005462 case DataType::Type::kVoid:
Calin Juravle52c48962014-12-16 17:02:57 +00005463 LOG(FATAL) << "Unreachable type " << field_type;
5464 UNREACHABLE();
5465 }
5466
Mark Mendell81489372015-11-04 11:30:41 -05005467 if (!maybe_record_implicit_null_check_done) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005468 codegen_->MaybeRecordImplicitNullCheck(instruction);
5469 }
5470
Roland Levillain4d027112015-07-01 15:41:14 +01005471 if (needs_write_barrier) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005472 Register temp = locations->GetTemp(0).AsRegister<Register>();
5473 Register card = locations->GetTemp(1).AsRegister<Register>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005474 codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00005475 }
5476
Calin Juravle52c48962014-12-16 17:02:57 +00005477 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005478 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00005479 }
5480}
5481
5482void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5483 HandleFieldGet(instruction, instruction->GetFieldInfo());
5484}
5485
5486void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5487 HandleFieldGet(instruction, instruction->GetFieldInfo());
5488}
5489
5490void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5491 HandleFieldSet(instruction, instruction->GetFieldInfo());
5492}
5493
5494void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005495 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005496}
5497
5498void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5499 HandleFieldSet(instruction, instruction->GetFieldInfo());
5500}
5501
5502void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005503 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005504}
5505
5506void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5507 HandleFieldGet(instruction, instruction->GetFieldInfo());
5508}
5509
5510void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5511 HandleFieldGet(instruction, instruction->GetFieldInfo());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005512}
5513
Vladimir Marko552a1342017-10-31 10:56:47 +00005514void LocationsBuilderX86::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5515 codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(EAX));
5516}
5517
5518void InstructionCodeGeneratorX86::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
5519 __ movl(EAX, Immediate(instruction->GetFormat()->GetValue()));
5520 codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
5521}
5522
Calin Juravlee460d1d2015-09-29 04:52:17 +01005523void LocationsBuilderX86::VisitUnresolvedInstanceFieldGet(
5524 HUnresolvedInstanceFieldGet* instruction) {
5525 FieldAccessCallingConventionX86 calling_convention;
5526 codegen_->CreateUnresolvedFieldLocationSummary(
5527 instruction, instruction->GetFieldType(), calling_convention);
5528}
5529
5530void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldGet(
5531 HUnresolvedInstanceFieldGet* instruction) {
5532 FieldAccessCallingConventionX86 calling_convention;
5533 codegen_->GenerateUnresolvedFieldAccess(instruction,
5534 instruction->GetFieldType(),
5535 instruction->GetFieldIndex(),
5536 instruction->GetDexPc(),
5537 calling_convention);
5538}
5539
5540void LocationsBuilderX86::VisitUnresolvedInstanceFieldSet(
5541 HUnresolvedInstanceFieldSet* instruction) {
5542 FieldAccessCallingConventionX86 calling_convention;
5543 codegen_->CreateUnresolvedFieldLocationSummary(
5544 instruction, instruction->GetFieldType(), calling_convention);
5545}
5546
5547void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldSet(
5548 HUnresolvedInstanceFieldSet* instruction) {
5549 FieldAccessCallingConventionX86 calling_convention;
5550 codegen_->GenerateUnresolvedFieldAccess(instruction,
5551 instruction->GetFieldType(),
5552 instruction->GetFieldIndex(),
5553 instruction->GetDexPc(),
5554 calling_convention);
5555}
5556
5557void LocationsBuilderX86::VisitUnresolvedStaticFieldGet(
5558 HUnresolvedStaticFieldGet* instruction) {
5559 FieldAccessCallingConventionX86 calling_convention;
5560 codegen_->CreateUnresolvedFieldLocationSummary(
5561 instruction, instruction->GetFieldType(), calling_convention);
5562}
5563
5564void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldGet(
5565 HUnresolvedStaticFieldGet* instruction) {
5566 FieldAccessCallingConventionX86 calling_convention;
5567 codegen_->GenerateUnresolvedFieldAccess(instruction,
5568 instruction->GetFieldType(),
5569 instruction->GetFieldIndex(),
5570 instruction->GetDexPc(),
5571 calling_convention);
5572}
5573
5574void LocationsBuilderX86::VisitUnresolvedStaticFieldSet(
5575 HUnresolvedStaticFieldSet* instruction) {
5576 FieldAccessCallingConventionX86 calling_convention;
5577 codegen_->CreateUnresolvedFieldLocationSummary(
5578 instruction, instruction->GetFieldType(), calling_convention);
5579}
5580
5581void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldSet(
5582 HUnresolvedStaticFieldSet* instruction) {
5583 FieldAccessCallingConventionX86 calling_convention;
5584 codegen_->GenerateUnresolvedFieldAccess(instruction,
5585 instruction->GetFieldType(),
5586 instruction->GetFieldIndex(),
5587 instruction->GetDexPc(),
5588 calling_convention);
5589}
5590
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005591void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005592 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5593 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5594 ? Location::RequiresRegister()
5595 : Location::Any();
5596 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005597}
5598
Calin Juravle2ae48182016-03-16 14:05:09 +00005599void CodeGeneratorX86::GenerateImplicitNullCheck(HNullCheck* instruction) {
5600 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005601 return;
5602 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005603 LocationSummary* locations = instruction->GetLocations();
5604 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005605
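// The TESTL below reads from `obj + 0`, so it faults if `obj` is null; the pc recorded
// right after lets the runtime's fault handler turn that fault into a NullPointerException.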
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005606 __ testl(EAX, Address(obj.AsRegister<Register>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00005607 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005608}
5609
Calin Juravle2ae48182016-03-16 14:05:09 +00005610void CodeGeneratorX86::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005611 SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005612 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005613
5614 LocationSummary* locations = instruction->GetLocations();
5615 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005616
5617 if (obj.IsRegister()) {
Mark Mendell42514f62015-03-31 11:34:22 -04005618 __ testl(obj.AsRegister<Register>(), obj.AsRegister<Register>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005619 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005620 __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005621 } else {
5622 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00005623 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005624 __ jmp(slow_path->GetEntryLabel());
5625 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005626 }
5627 __ j(kEqual, slow_path->GetEntryLabel());
5628}
5629
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005630void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005631 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005632}
5633
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005634void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005635 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005636 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005637 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005638 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5639 object_array_get_with_read_barrier
5640 ? LocationSummary::kCallOnSlowPath
5641 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005642 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005643 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005644 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005645 locations->SetInAt(0, Location::RequiresRegister());
5646 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005647 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005648 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5649 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005650 // The output overlaps in case of long: we don't want the low move
5651 // to overwrite the array's location. Likewise, in the case of an
5652 // object array get with read barriers enabled, we do not want the
5653 // move to overwrite the array's location, as we need it to emit
5654 // the read barrier.
5655 locations->SetOut(
5656 Location::RequiresRegister(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005657 (instruction->GetType() == DataType::Type::kInt64 || object_array_get_with_read_barrier)
5658 ? Location::kOutputOverlap
5659 : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005660 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005661}
5662
5663void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
5664 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005665 Location obj_loc = locations->InAt(0);
5666 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005667 Location index = locations->InAt(1);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005668 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01005669 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005670
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005671 DataType::Type type = instruction->GetType();
Calin Juravle77520bc2015-01-12 18:45:46 +00005672 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005673 case DataType::Type::kBool:
5674 case DataType::Type::kUint8: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005675 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005676 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005677 break;
5678 }
5679
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005680 case DataType::Type::kInt8: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005681 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005682 __ movsxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005683 break;
5684 }
5685
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005686 case DataType::Type::kUint16: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005687 Register out = out_loc.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07005688 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5689 // Branch into the compressed and uncompressed cases depending on the string's compression flag.
5690 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
5691 NearLabel done, not_compressed;
Vladimir Marko3c89d422017-02-17 11:30:23 +00005692 __ testb(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005693 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005694 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
5695 "Expecting 0=compressed, 1=uncompressed");
5696 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07005697 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
5698 __ jmp(&done);
5699 __ Bind(&not_compressed);
5700 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5701 __ Bind(&done);
5702 } else {
5703 // Common case: a char[] load, or String.charAt() when the string
5704 // compression feature is turned off.
5705 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5706 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005707 break;
5708 }
5709
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005710 case DataType::Type::kInt16: {
5711 Register out = out_loc.AsRegister<Register>();
5712 __ movsxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5713 break;
5714 }
5715
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005716 case DataType::Type::kInt32: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005717 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005718 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005719 break;
5720 }
5721
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005722 case DataType::Type::kReference: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005723 static_assert(
5724 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5725 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00005726 // /* HeapReference<Object> */ out =
5727 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
5728 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005729 // Note that a potential implicit null check is handled in this
5730 // CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier call.
5731 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08005732 instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005733 } else {
5734 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005735 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
5736 codegen_->MaybeRecordImplicitNullCheck(instruction);
5737 // If read barriers are enabled, emit read barriers other than
5738 // Baker's using a slow path (and also unpoison the loaded
5739 // reference, if heap poisoning is enabled).
Roland Levillain7c1559a2015-12-15 10:55:36 +00005740 if (index.IsConstant()) {
5741 uint32_t offset =
5742 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain7c1559a2015-12-15 10:55:36 +00005743 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
5744 } else {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005745 codegen_->MaybeGenerateReadBarrierSlow(
5746 instruction, out_loc, out_loc, obj_loc, data_offset, index);
5747 }
5748 }
5749 break;
5750 }
5751
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005752 case DataType::Type::kInt64: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005753 DCHECK_NE(obj, out_loc.AsRegisterPairLow<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005754 __ movl(out_loc.AsRegisterPairLow<Register>(),
5755 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
5756 codegen_->MaybeRecordImplicitNullCheck(instruction);
5757 __ movl(out_loc.AsRegisterPairHigh<Register>(),
5758 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset + kX86WordSize));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005759 break;
5760 }
5761
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005762 case DataType::Type::kFloat32: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005763 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005764 __ movss(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005765 break;
5766 }
5767
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005768 case DataType::Type::kFloat64: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005769 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005770 __ movsd(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005771 break;
5772 }
5773
Aart Bik66c158e2018-01-31 12:55:04 -08005774 case DataType::Type::kUint32:
5775 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005776 case DataType::Type::kVoid:
Calin Juravle77520bc2015-01-12 18:45:46 +00005777 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005778 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005779 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005780
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005781 if (type == DataType::Type::kReference || type == DataType::Type::kInt64) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005782 // Potential implicit null checks, in the case of reference or
5783 // long arrays, are handled in the previous switch statement.
5784 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005785 codegen_->MaybeRecordImplicitNullCheck(instruction);
5786 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005787}
5788
5789void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005790 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005791
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005792 bool needs_write_barrier =
5793 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005794 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005795
Vladimir Markoca6fff82017-10-03 14:49:14 +01005796 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffray39468442014-09-02 15:17:15 +01005797 instruction,
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005798 needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005799
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005800 bool is_byte_type = DataType::Size(value_type) == 1u;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005801 // We need the inputs to be different from the output in the case of a long operation.
5802 // For a byte operation, the register allocator does not support multiple
5803 // inputs that die at entry when one of them must be in a specific register.
5804 locations->SetInAt(0, Location::RequiresRegister());
5805 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5806 if (is_byte_type) {
5807 // Ensure the value is in a byte register.
5808 locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005809 } else if (DataType::IsFloatingPointType(value_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05005810 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005811 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005812 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5813 }
5814 if (needs_write_barrier) {
5815 // Temporary registers for the write barrier.
5816 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
5817 // Ensure the card is in a byte register.
Roland Levillain4f6b0b52015-11-23 19:29:22 +00005818 locations->AddTemp(Location::RegisterLocation(ECX));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005819 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005820}
5821
5822void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
5823 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005824 Location array_loc = locations->InAt(0);
5825 Register array = array_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005826 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005827 Location value = locations->InAt(2);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005828 DataType::Type value_type = instruction->GetComponentType();
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005829 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005830 bool needs_write_barrier =
5831 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005832
5833 switch (value_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005834 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005835 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005836 case DataType::Type::kInt8: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005837 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005838 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005839 if (value.IsRegister()) {
5840 __ movb(address, value.AsRegister<ByteRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005841 } else {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005842 __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005843 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005844 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005845 break;
5846 }
5847
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005848 case DataType::Type::kUint16:
5849 case DataType::Type::kInt16: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005850 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005851 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005852 if (value.IsRegister()) {
5853 __ movw(address, value.AsRegister<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005854 } else {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005855 __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005856 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005857 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005858 break;
5859 }
5860
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005861 case DataType::Type::kReference: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005862 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005863 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005864
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005865 if (!value.IsRegister()) {
5866 // Just setting null.
5867 DCHECK(instruction->InputAt(2)->IsNullConstant());
5868 DCHECK(value.IsConstant()) << value;
5869 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00005870 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005871 DCHECK(!needs_write_barrier);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005872 DCHECK(!needs_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005873 break;
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005874 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005875
5876 DCHECK(needs_write_barrier);
5877 Register register_value = value.AsRegister<Register>();
Roland Levillain16d9f942016-08-25 17:27:56 +01005878 Location temp_loc = locations->GetTemp(0);
5879 Register temp = temp_loc.AsRegister<Register>();
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005880
5881 bool can_value_be_null = instruction->GetValueCanBeNull();
5882 NearLabel do_store;
5883 if (can_value_be_null) {
5884 __ testl(register_value, register_value);
5885 __ j(kEqual, &do_store);
5886 }
5887
5888 SlowPathCode* slow_path = nullptr;
5889 if (needs_type_check) {
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005890 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005891 codegen_->AddSlowPath(slow_path);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005892
5893 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5894 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5895 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005896
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005897 // Note that when Baker read barriers are enabled, the type
5898 // checks are performed without read barriers. This is fine,
5899 // even in the case where a class object is in the from-space
5900 // after the flip, as a comparison involving such a type would
5901 // not produce a false positive; it may of course produce a
5902 // false negative, in which case we would take the ArraySet
5903 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01005904
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005905 // /* HeapReference<Class> */ temp = array->klass_
5906 __ movl(temp, Address(array, class_offset));
5907 codegen_->MaybeRecordImplicitNullCheck(instruction);
5908 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01005909
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005910 // /* HeapReference<Class> */ temp = temp->component_type_
5911 __ movl(temp, Address(temp, component_offset));
5912 // If heap poisoning is enabled, no need to unpoison `temp`
5913 // nor the object reference in `register_value->klass`, as
5914 // we are comparing two poisoned references.
5915 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01005916
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005917 if (instruction->StaticTypeOfArrayIsObjectArray()) {
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005918 NearLabel do_put;
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005919 __ j(kEqual, &do_put);
5920 // If heap poisoning is enabled, the `temp` reference has
5921 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005922 __ MaybeUnpoisonHeapReference(temp);
5923
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005924 // If heap poisoning is enabled, no need to unpoison the
5925 // heap reference loaded below, as it is only used for a
5926 // comparison with null.
5927 __ cmpl(Address(temp, super_offset), Immediate(0));
5928 __ j(kNotEqual, slow_path->GetEntryLabel());
5929 __ Bind(&do_put);
5930 } else {
5931 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005932 }
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005933 }
5934
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005935 Register card = locations->GetTemp(1).AsRegister<Register>();
5936 codegen_->MarkGCCard(
5937 temp, card, array, value.AsRegister<Register>(), /* value_can_be_null= */ false);
5938
5939 if (can_value_be_null) {
5940 DCHECK(do_store.IsLinked());
5941 __ Bind(&do_store);
5942 }
5943
5944 Register source = register_value;
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005945 if (kPoisonHeapReferences) {
5946 __ movl(temp, register_value);
5947 __ PoisonHeapReference(temp);
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005948 source = temp;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005949 }
5950
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005951 __ movl(address, source);
5952
5953 if (can_value_be_null || !needs_type_check) {
5954 codegen_->MaybeRecordImplicitNullCheck(instruction);
5955 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005956
Vladimir Marko0dda8c82019-05-16 12:47:40 +00005957 if (slow_path != nullptr) {
5958 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005959 }
5960
5961 break;
5962 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005963
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005964 case DataType::Type::kInt32: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005965 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005966 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005967 if (value.IsRegister()) {
5968 __ movl(address, value.AsRegister<Register>());
5969 } else {
5970 DCHECK(value.IsConstant()) << value;
5971 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5972 __ movl(address, Immediate(v));
5973 }
5974 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005975 break;
5976 }
5977
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005978 case DataType::Type::kInt64: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005979 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005980 if (value.IsRegisterPair()) {
5981 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
5982 value.AsRegisterPairLow<Register>());
5983 codegen_->MaybeRecordImplicitNullCheck(instruction);
5984 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
5985 value.AsRegisterPairHigh<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005986 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005987 DCHECK(value.IsConstant());
5988 int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
5989 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
5990 Immediate(Low32Bits(val)));
5991 codegen_->MaybeRecordImplicitNullCheck(instruction);
5992 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
5993 Immediate(High32Bits(val)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005994 }
5995 break;
5996 }
5997
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005998 case DataType::Type::kFloat32: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005999 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006000 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendell81489372015-11-04 11:30:41 -05006001 if (value.IsFpuRegister()) {
6002 __ movss(address, value.AsFpuRegister<XmmRegister>());
6003 } else {
6004 DCHECK(value.IsConstant());
6005 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
6006 __ movl(address, Immediate(v));
6007 }
6008 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006009 break;
6010 }
6011
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006012 case DataType::Type::kFloat64: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006013 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006014 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendell81489372015-11-04 11:30:41 -05006015 if (value.IsFpuRegister()) {
6016 __ movsd(address, value.AsFpuRegister<XmmRegister>());
6017 } else {
6018 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006019 Address address_hi =
6020 CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset + kX86WordSize);
Mark Mendell81489372015-11-04 11:30:41 -05006021 int64_t v = bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
6022 __ movl(address, Immediate(Low32Bits(v)));
6023 codegen_->MaybeRecordImplicitNullCheck(instruction);
6024 __ movl(address_hi, Immediate(High32Bits(v)));
6025 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05006026 break;
6027 }
6028
Aart Bik66c158e2018-01-31 12:55:04 -08006029 case DataType::Type::kUint32:
6030 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006031 case DataType::Type::kVoid:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006032 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07006033 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006034 }
6035}
6036
6037void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006038 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01006039 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04006040 if (!instruction->IsEmittedAtUseSite()) {
6041 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6042 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006043}
6044
6045void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04006046 if (instruction->IsEmittedAtUseSite()) {
6047 return;
6048 }
6049
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006050 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01006051 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00006052 Register obj = locations->InAt(0).AsRegister<Register>();
6053 Register out = locations->Out().AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006054 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00006055 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07006056  // Shift out the compression flag in case the array is a String's char array.
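  // (With compression enabled, the count field is assumed to hold the length shifted
  // left by one with the compression flag in the low bit, so a single logical shift
  // right recovers the character count.)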
6057 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006058 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07006059 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006060}
6061
6062void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006063 RegisterSet caller_saves = RegisterSet::Empty();
6064 InvokeRuntimeCallingConvention calling_convention;
6065 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6066 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
6067 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05006068 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04006069 HInstruction* length = instruction->InputAt(1);
6070 if (!length->IsEmittedAtUseSite()) {
6071 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
6072 }
jessicahandojo4877b792016-09-08 19:49:13 -07006073  // Need a temporary register to read the array's length.
6074 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
6075 locations->AddTemp(Location::RequiresRegister());
6076 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006077}
6078
6079void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
jessicahandojo4877b792016-09-08 19:49:13 -07006080 const bool is_string_compressed_char_at =
6081 mirror::kUseStringCompression && instruction->IsStringCharAt();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006082 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05006083 Location index_loc = locations->InAt(0);
6084 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006085 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006086 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006087
Mark Mendell99dbd682015-04-22 16:18:52 -04006088 if (length_loc.IsConstant()) {
6089 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
6090 if (index_loc.IsConstant()) {
6091      // BCE will remove the bounds check if we are guaranteed to pass.
6092 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
6093 if (index < 0 || index >= length) {
6094 codegen_->AddSlowPath(slow_path);
6095 __ jmp(slow_path->GetEntryLabel());
6096 } else {
6097 // Some optimization after BCE may have generated this, and we should not
6098 // generate a bounds check if it is a valid range.
6099 }
6100 return;
6101 }
6102
6103 // We have to reverse the jump condition because the length is the constant.
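  // Note: a single unsigned compare also catches negative indices, since they wrap to
  // large unsigned values and take the kAboveEqual branch below.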
6104 Register index_reg = index_loc.AsRegister<Register>();
6105 __ cmpl(index_reg, Immediate(length));
6106 codegen_->AddSlowPath(slow_path);
6107 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05006108 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04006109 HInstruction* array_length = instruction->InputAt(1);
6110 if (array_length->IsEmittedAtUseSite()) {
6111 // Address the length field in the array.
6112 DCHECK(array_length->IsArrayLength());
6113 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
6114 Location array_loc = array_length->GetLocations()->InAt(0);
6115 Address array_len(array_loc.AsRegister<Register>(), len_offset);
jessicahandojo4877b792016-09-08 19:49:13 -07006116 if (is_string_compressed_char_at) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006117 // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
6118 // the string compression flag) with the in-memory length and avoid the temporary.
jessicahandojo4877b792016-09-08 19:49:13 -07006119 Register length_reg = locations->GetTemp(0).AsRegister<Register>();
6120 __ movl(length_reg, array_len);
6121 codegen_->MaybeRecordImplicitNullCheck(array_length);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006122 __ shrl(length_reg, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07006123 codegen_->GenerateIntCompare(length_reg, index_loc);
Mark Mendellee8d9712016-07-12 11:13:15 -04006124 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006125        // Checking bounds for the general case:
 6126        // an array of char, or a String's char array with compression disabled.
6127 if (index_loc.IsConstant()) {
6128 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
6129 __ cmpl(array_len, Immediate(value));
6130 } else {
6131 __ cmpl(array_len, index_loc.AsRegister<Register>());
6132 }
6133 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendellee8d9712016-07-12 11:13:15 -04006134 }
Mark Mendell99dbd682015-04-22 16:18:52 -04006135 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006136 codegen_->GenerateIntCompare(length_loc, index_loc);
Mark Mendell99dbd682015-04-22 16:18:52 -04006137 }
6138 codegen_->AddSlowPath(slow_path);
6139 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05006140 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006141}
6142
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006143void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006144 LOG(FATAL) << "Unreachable";
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01006145}
6146
6147void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01006148 if (instruction->GetNext()->IsSuspendCheck() &&
6149 instruction->GetBlock()->GetLoopInformation() != nullptr) {
6150 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
6151 // The back edge will generate the suspend check.
6152 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
6153 }
6154
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006155 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
6156}
6157
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006158void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006159 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6160 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07006161 // In suspend check slow path, usually there are no caller-save registers at all.
6162 // If SIMD instructions are present, however, we force spilling all live SIMD
6163 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07006164 locations->SetCustomSlowPathCallerSaves(
6165 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006166}
6167
6168void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006169 HBasicBlock* block = instruction->GetBlock();
6170 if (block->GetLoopInformation() != nullptr) {
6171 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
6172 // The back edge will generate the suspend check.
6173 return;
6174 }
6175 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
6176 // The goto will generate the suspend check.
6177 return;
6178 }
6179 GenerateSuspendCheck(instruction, nullptr);
6180}
6181
6182void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
6183 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006184 SuspendCheckSlowPathX86* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006185 down_cast<SuspendCheckSlowPathX86*>(instruction->GetSlowPath());
6186 if (slow_path == nullptr) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006187 slow_path =
6188 new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86(instruction, successor);
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006189 instruction->SetSlowPath(slow_path);
6190 codegen_->AddSlowPath(slow_path);
6191 if (successor != nullptr) {
6192 DCHECK(successor->IsLoopHeader());
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006193 }
6194 } else {
6195 DCHECK_EQ(slow_path->GetSuccessor(), successor);
6196 }
6197
Andreas Gampe542451c2016-07-26 09:02:02 -07006198 __ fs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86PointerSize>().Int32Value()),
Roland Levillain7c1559a2015-12-15 10:55:36 +00006199 Immediate(0));
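  // A non-zero thread flags word indicates a pending request (e.g. suspension or a
  // checkpoint), in which case control goes to the slow path; otherwise execution
  // continues, or jumps straight to the successor block when one is given.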
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006200 if (successor == nullptr) {
6201 __ j(kNotEqual, slow_path->GetEntryLabel());
6202 __ Bind(slow_path->GetReturnLabel());
6203 } else {
6204 __ j(kEqual, codegen_->GetLabelOf(successor));
6205 __ jmp(slow_path->GetEntryLabel());
6206 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006207}
6208
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006209X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
6210 return codegen_->GetAssembler();
6211}
6212
Aart Bikcfe50bb2017-12-12 14:54:12 -08006213void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src, int number_of_words) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006214 ScratchRegisterScope ensure_scratch(
6215 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
6216 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
6217 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
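  // If the scratch register had to be spilled, it was pushed on the stack, so every
  // ESP-relative offset below is biased by one word to account for the moved ESP.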
Mark Mendell7c8d0092015-01-26 11:21:33 -05006218
Aart Bikcfe50bb2017-12-12 14:54:12 -08006219  // Now that a temp register is available (possibly spilled), move the blocks of memory.
6220 for (int i = 0; i < number_of_words; i++) {
6221 __ movl(temp_reg, Address(ESP, src + stack_offset));
6222 __ movl(Address(ESP, dst + stack_offset), temp_reg);
6223 stack_offset += kX86WordSize;
6224 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006225}
6226
6227void ParallelMoveResolverX86::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01006228 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006229 Location source = move->GetSource();
6230 Location destination = move->GetDestination();
6231
6232 if (source.IsRegister()) {
6233 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006234 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
David Brazdil74eb1b22015-12-14 11:44:01 +00006235 } else if (destination.IsFpuRegister()) {
6236 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006237 } else {
6238 DCHECK(destination.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006239 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006240 }
David Brazdil74eb1b22015-12-14 11:44:01 +00006241 } else if (source.IsRegisterPair()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006242 size_t elem_size = DataType::Size(DataType::Type::kInt32);
David Brazdil74eb1b22015-12-14 11:44:01 +00006243 // Create stack space for 2 elements.
6244 __ subl(ESP, Immediate(2 * elem_size));
6245 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
6246 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
6247 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
6248 // And remove the temporary stack space we allocated.
6249 __ addl(ESP, Immediate(2 * elem_size));
Mark Mendell7c8d0092015-01-26 11:21:33 -05006250 } else if (source.IsFpuRegister()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00006251 if (destination.IsRegister()) {
6252 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
6253 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006254 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
David Brazdil74eb1b22015-12-14 11:44:01 +00006255 } else if (destination.IsRegisterPair()) {
6256 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
6257 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
6258 __ psrlq(src_reg, Immediate(32));
6259 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006260 } else if (destination.IsStackSlot()) {
6261 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07006262 } else if (destination.IsDoubleStackSlot()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006263 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07006264 } else {
6265 DCHECK(destination.IsSIMDStackSlot());
6266 __ movups(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05006267 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006268 } else if (source.IsStackSlot()) {
6269 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006270 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Mark Mendell7c8d0092015-01-26 11:21:33 -05006271 } else if (destination.IsFpuRegister()) {
6272 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006273 } else {
6274 DCHECK(destination.IsStackSlot());
Aart Bikcfe50bb2017-12-12 14:54:12 -08006275 MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 1);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006276 }
6277 } else if (source.IsDoubleStackSlot()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00006278 if (destination.IsRegisterPair()) {
6279 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
6280 __ movl(destination.AsRegisterPairHigh<Register>(),
6281 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
6282 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006283 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
6284 } else {
6285 DCHECK(destination.IsDoubleStackSlot()) << destination;
Aart Bikcfe50bb2017-12-12 14:54:12 -08006286 MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 2);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006287 }
Aart Bik5576f372017-03-23 16:17:37 -07006288 } else if (source.IsSIMDStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08006289 if (destination.IsFpuRegister()) {
6290 __ movups(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
6291 } else {
6292 DCHECK(destination.IsSIMDStackSlot());
6293 MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex(), 4);
6294 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01006295 } else if (source.IsConstant()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006296 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00006297 if (constant->IsIntConstant() || constant->IsNullConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05006298 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006299 if (destination.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05006300 if (value == 0) {
6301 __ xorl(destination.AsRegister<Register>(), destination.AsRegister<Register>());
6302 } else {
6303 __ movl(destination.AsRegister<Register>(), Immediate(value));
6304 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05006305 } else {
6306 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell09b84632015-02-13 17:48:38 -05006307 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Mark Mendell7c8d0092015-01-26 11:21:33 -05006308 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006309 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006310 float fp_value = constant->AsFloatConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00006311 int32_t value = bit_cast<int32_t, float>(fp_value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006312 Immediate imm(value);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006313 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006314 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
6315 if (value == 0) {
6316 // Easy handling of 0.0.
6317 __ xorps(dest, dest);
6318 } else {
6319 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006320 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
6321 Register temp = static_cast<Register>(ensure_scratch.GetRegister());
6322 __ movl(temp, Immediate(value));
6323 __ movd(dest, temp);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006324 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05006325 } else {
6326 DCHECK(destination.IsStackSlot()) << destination;
6327 __ movl(Address(ESP, destination.GetStackIndex()), imm);
6328 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006329 } else if (constant->IsLongConstant()) {
6330 int64_t value = constant->AsLongConstant()->GetValue();
6331 int32_t low_value = Low32Bits(value);
6332 int32_t high_value = High32Bits(value);
6333 Immediate low(low_value);
6334 Immediate high(high_value);
6335 if (destination.IsDoubleStackSlot()) {
6336 __ movl(Address(ESP, destination.GetStackIndex()), low);
6337 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
6338 } else {
6339 __ movl(destination.AsRegisterPairLow<Register>(), low);
6340 __ movl(destination.AsRegisterPairHigh<Register>(), high);
6341 }
6342 } else {
6343 DCHECK(constant->IsDoubleConstant());
6344 double dbl_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00006345 int64_t value = bit_cast<int64_t, double>(dbl_value);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006346 int32_t low_value = Low32Bits(value);
6347 int32_t high_value = High32Bits(value);
6348 Immediate low(low_value);
6349 Immediate high(high_value);
6350 if (destination.IsFpuRegister()) {
6351 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
6352 if (value == 0) {
6353 // Easy handling of 0.0.
6354 __ xorpd(dest, dest);
6355 } else {
6356 __ pushl(high);
6357 __ pushl(low);
6358 __ movsd(dest, Address(ESP, 0));
6359 __ addl(ESP, Immediate(8));
6360 }
6361 } else {
6362 DCHECK(destination.IsDoubleStackSlot()) << destination;
6363 __ movl(Address(ESP, destination.GetStackIndex()), low);
6364 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
6365 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01006366 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006367 } else {
Nicolas Geoffray42d1f5f2015-01-16 09:14:18 +00006368 LOG(FATAL) << "Unimplemented move: " << destination << " <- " << source;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006369 }
6370}
6371
Mark Mendella5c19ce2015-04-01 12:51:05 -04006372void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006373 Register suggested_scratch = reg == EAX ? EBX : EAX;
6374 ScratchRegisterScope ensure_scratch(
6375 this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
6376
6377 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
6378 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
6379 __ movl(Address(ESP, mem + stack_offset), reg);
6380 __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006381}
6382
Mark Mendell7c8d0092015-01-26 11:21:33 -05006383void ParallelMoveResolverX86::Exchange32(XmmRegister reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006384 ScratchRegisterScope ensure_scratch(
6385 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
6386
6387 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
6388 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
6389 __ movl(temp_reg, Address(ESP, mem + stack_offset));
6390 __ movss(Address(ESP, mem + stack_offset), reg);
6391 __ movd(reg, temp_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006392}
6393
Aart Bikcfe50bb2017-12-12 14:54:12 -08006394void ParallelMoveResolverX86::Exchange128(XmmRegister reg, int mem) {
6395 size_t extra_slot = 4 * kX86WordSize;
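  // Stage the 16-byte register in freshly reserved stack space, exchange that area with
  // the SIMD stack slot word by word, then reload the register and release the space.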
6396 __ subl(ESP, Immediate(extra_slot));
6397 __ movups(Address(ESP, 0), XmmRegister(reg));
6398 ExchangeMemory(0, mem + extra_slot, 4);
6399 __ movups(XmmRegister(reg), Address(ESP, 0));
6400 __ addl(ESP, Immediate(extra_slot));
6401}
6402
6403void ParallelMoveResolverX86::ExchangeMemory(int mem1, int mem2, int number_of_words) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006404 ScratchRegisterScope ensure_scratch1(
6405 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006406
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006407 Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
6408 ScratchRegisterScope ensure_scratch2(
6409 this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006410
Guillaume Sancheze14590b2015-04-15 18:57:27 +00006411 int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
6412 stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
Aart Bikcfe50bb2017-12-12 14:54:12 -08006413
6414 // Now that temp registers are available (possibly spilled), exchange blocks of memory.
6415 for (int i = 0; i < number_of_words; i++) {
6416 __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
6417 __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
6418 __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
6419 __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
6420 stack_offset += kX86WordSize;
6421 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006422}
6423
6424void ParallelMoveResolverX86::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01006425 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006426 Location source = move->GetSource();
6427 Location destination = move->GetDestination();
6428
6429 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell90979812015-07-28 16:41:21 -04006430 // Use XOR swap algorithm to avoid serializing XCHG instruction or using a temporary.
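    // That is: a ^= b; b ^= a; a ^= b; leaves the two registers exchanged. This only
    // works for distinct registers (xor-swapping a register with itself zeroes it),
    // hence the DCHECK below.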
6431 DCHECK_NE(destination.AsRegister<Register>(), source.AsRegister<Register>());
6432 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
6433 __ xorl(source.AsRegister<Register>(), destination.AsRegister<Register>());
6434 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006435 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006436 Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006437 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006438 Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006439 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08006440 ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 1);
Mark Mendell7c8d0092015-01-26 11:21:33 -05006441 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
6442 // Use XOR Swap algorithm to avoid a temporary.
6443 DCHECK_NE(source.reg(), destination.reg());
6444 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
6445 __ xorpd(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
6446 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
6447 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
6448 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
6449 } else if (destination.IsFpuRegister() && source.IsStackSlot()) {
6450 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006451 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
6452 // Take advantage of the 16 bytes in the XMM register.
6453 XmmRegister reg = source.AsFpuRegister<XmmRegister>();
6454 Address stack(ESP, destination.GetStackIndex());
6455 // Load the double into the high doubleword.
6456 __ movhpd(reg, stack);
6457
6458 // Store the low double into the destination.
6459 __ movsd(stack, reg);
6460
6461 // Move the high double to the low double.
6462 __ psrldq(reg, Immediate(8));
6463 } else if (destination.IsFpuRegister() && source.IsDoubleStackSlot()) {
6464 // Take advantage of the 16 bytes in the XMM register.
6465 XmmRegister reg = destination.AsFpuRegister<XmmRegister>();
6466 Address stack(ESP, source.GetStackIndex());
6467 // Load the double into the high doubleword.
6468 __ movhpd(reg, stack);
6469
6470 // Store the low double into the destination.
6471 __ movsd(stack, reg);
6472
6473 // Move the high double to the low double.
6474 __ psrldq(reg, Immediate(8));
6475 } else if (destination.IsDoubleStackSlot() && source.IsDoubleStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08006476 ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 2);
6477 } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
6478 ExchangeMemory(destination.GetStackIndex(), source.GetStackIndex(), 4);
6479 } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
6480 Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
6481 } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
6482 Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006483 } else {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006484 LOG(FATAL) << "Unimplemented: source: " << source << ", destination: " << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006485 }
6486}
6487
6488void ParallelMoveResolverX86::SpillScratch(int reg) {
6489 __ pushl(static_cast<Register>(reg));
6490}
6491
6492void ParallelMoveResolverX86::RestoreScratch(int reg) {
6493 __ popl(static_cast<Register>(reg));
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01006494}
6495
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006496HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
6497 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006498 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006499 case HLoadClass::LoadKind::kInvalid:
6500 LOG(FATAL) << "UNREACHABLE";
6501 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006502 case HLoadClass::LoadKind::kReferrersClass:
6503 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006504 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006505 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006506 case HLoadClass::LoadKind::kBssEntry:
Vladimir Marko764d4542017-05-16 10:31:41 +01006507 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006508 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006509 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006510 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006511 DCHECK(Runtime::Current()->UseJitCompilation());
6512 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006513 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006514 break;
6515 }
6516 return desired_class_load_kind;
6517}
6518
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006519void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00006520 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006521 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006522 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00006523 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006524 cls,
6525 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00006526 Location::RegisterLocation(EAX));
Vladimir Markoea4c1262017-02-06 19:59:33 +00006527 DCHECK_EQ(calling_convention.GetRegisterAt(0), EAX);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006528 return;
6529 }
Vladimir Marko41559982017-01-06 14:04:23 +00006530 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006531
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006532 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
6533 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006534 ? LocationSummary::kCallOnSlowPath
6535 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006536 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006537 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006538 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006539 }
6540
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006541 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006542 load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006543 load_kind == HLoadClass::LoadKind::kBootImageRelRo ||
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006544 load_kind == HLoadClass::LoadKind::kBssEntry) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006545 locations->SetInAt(0, Location::RequiresRegister());
6546 }
6547 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006548 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
6549 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6550 // Rely on the type resolution and/or initialization to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006551 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006552 } else {
6553 // For non-Baker read barrier we have a temp-clobbering call.
6554 }
6555 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006556}
6557
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006558Label* CodeGeneratorX86::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006559 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006560 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006561 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006562 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006563 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006564 PatchInfo<Label>* info = &jit_class_patches_.back();
6565 return &info->label;
6566}
6567
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006568// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6569// move.
6570void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00006571 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006572 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00006573 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01006574 return;
6575 }
Vladimir Marko41559982017-01-06 14:04:23 +00006576 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01006577
Vladimir Marko41559982017-01-06 14:04:23 +00006578 LocationSummary* locations = cls->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006579 Location out_loc = locations->Out();
6580 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006581
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006582 bool generate_null_check = false;
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006583 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
6584 ? kWithoutReadBarrier
6585 : kCompilerReadBarrierOption;
Vladimir Marko41559982017-01-06 14:04:23 +00006586 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006587 case HLoadClass::LoadKind::kReferrersClass: {
6588 DCHECK(!cls->CanCallRuntime());
6589 DCHECK(!cls->MustGenerateClinitCheck());
6590 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
6591 Register current_method = locations->InAt(0).AsRegister<Register>();
6592 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006593 cls,
6594 out_loc,
6595 Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
Andreas Gampe3db70682018-12-26 15:12:03 -08006596 /* fixup_label= */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006597 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006598 break;
6599 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006600 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006601 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006602 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006603 Register method_address = locations->InAt(0).AsRegister<Register>();
6604 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006605 codegen_->RecordBootImageTypePatch(cls);
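      // kDummy32BitOffset is only a placeholder displacement; the recorded patch marks its
      // location so a later fixup pass (presumably at link time) can substitute the real
      // offset relative to the method address register.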
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006606 break;
6607 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006608 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01006609 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6610 Register method_address = locations->InAt(0).AsRegister<Register>();
6611 __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006612 codegen_->RecordBootImageRelRoPatch(cls->InputAt(0)->AsX86ComputeBaseMethodAddress(),
6613 codegen_->GetBootImageOffset(cls));
Vladimir Marko94ec2db2017-09-06 17:21:03 +01006614 break;
6615 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006616 case HLoadClass::LoadKind::kBssEntry: {
6617 Register method_address = locations->InAt(0).AsRegister<Register>();
6618 Address address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6619 Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
6620 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01006621 // No need for memory fence, thanks to the x86 memory model.
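      // (Plain x86 loads already provide acquire ordering under the x86-TSO model, so the
      // load of the published class above needs no explicit barrier.)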
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006622 generate_null_check = true;
6623 break;
6624 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006625 case HLoadClass::LoadKind::kJitBootImageAddress: {
6626 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
6627 uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
6628 DCHECK_NE(address, 0u);
6629 __ movl(out, Immediate(address));
6630 break;
6631 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006632 case HLoadClass::LoadKind::kJitTableAddress: {
6633 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6634 Label* fixup_label = codegen_->NewJitRootClassPatch(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006635 cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006636 // /* GcRoot<mirror::Class> */ out = *address
Vladimir Markoea4c1262017-02-06 19:59:33 +00006637 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006638 break;
6639 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006640 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006641 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00006642 LOG(FATAL) << "UNREACHABLE";
6643 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006644 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006645
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006646 if (generate_null_check || cls->MustGenerateClinitCheck()) {
6647 DCHECK(cls->CanCallRuntime());
Vladimir Markoa9f303c2018-07-20 16:43:56 +01006648 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86(cls, cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006649 codegen_->AddSlowPath(slow_path);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006650
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006651 if (generate_null_check) {
6652 __ testl(out, out);
6653 __ j(kEqual, slow_path->GetEntryLabel());
6654 }
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006655
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006656 if (cls->MustGenerateClinitCheck()) {
6657 GenerateClassInitializationCheck(slow_path, out);
6658 } else {
6659 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006660 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006661 }
6662}
6663
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006664void LocationsBuilderX86::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6665 InvokeRuntimeCallingConvention calling_convention;
6666 Location location = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6667 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
6668}
6669
6670void InstructionCodeGeneratorX86::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6671 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
6672}
6673
Orion Hodson18259d72018-04-12 11:18:23 +01006674void LocationsBuilderX86::VisitLoadMethodType(HLoadMethodType* load) {
6675 InvokeRuntimeCallingConvention calling_convention;
6676 Location location = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6677 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
6678}
6679
6680void InstructionCodeGeneratorX86::VisitLoadMethodType(HLoadMethodType* load) {
6681 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
6682}
6683
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006684void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
6685 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006686 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006687 locations->SetInAt(0, Location::RequiresRegister());
6688 if (check->HasUses()) {
6689 locations->SetOut(Location::SameAsFirstInput());
6690 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006691 // Rely on the type initialization to save everything we need.
6692 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006693}
6694
6695void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006696 // We assume the class to not be null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01006697 SlowPathCode* slow_path =
6698 new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86(check->GetLoadClass(), check);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006699 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00006700 GenerateClassInitializationCheck(slow_path,
6701 check->GetLocations()->InAt(0).AsRegister<Register>());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006702}
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006703
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006704void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07006705 SlowPathCode* slow_path, Register class_reg) {
Vladimir Markodc682aa2018-01-04 18:42:57 +00006706 constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
6707 const size_t status_byte_offset =
6708 mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
6709 constexpr uint32_t shifted_initialized_value =
6710 enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
6711
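  // Per the offset arithmetic above, the class status occupies the upper bits of the
  // 32-bit field (above the subtype check bits), so an unsigned compare of that single
  // byte against the shifted kInitialized value suffices: any status below kInitialized
  // takes the slow path.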
6712 __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_initialized_value));
Vladimir Marko2c64a832018-01-04 11:31:56 +00006713 __ j(kBelow, slow_path->GetEntryLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006714 __ Bind(slow_path->GetExitLabel());
6715 // No need for memory fence, thanks to the X86 memory model.
6716}
6717
Vladimir Marko175e7862018-03-27 09:03:13 +00006718void InstructionCodeGeneratorX86::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
6719 Register temp) {
6720 uint32_t path_to_root = check->GetBitstringPathToRoot();
6721 uint32_t mask = check->GetBitstringMask();
6722 DCHECK(IsPowerOfTwo(mask + 1));
6723 size_t mask_bits = WhichPowerOf2(mask + 1);
6724
6725 if (mask_bits == 16u) {
6726 // Compare the bitstring in memory.
6727 __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
6728 } else {
6729 // /* uint32_t */ temp = temp->status_
6730 __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
6731 // Compare the bitstring bits using SUB.
6732 __ subl(temp, Immediate(path_to_root));
6733 // Shift out bits that do not contribute to the comparison.
6734 __ shll(temp, Immediate(32u - mask_bits));
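    // After the SUB and SHL, the register is zero exactly when the object's bitstring
    // satisfies (bitstring & mask) == path_to_root; callers are expected to branch on the
    // flags set here (e.g. kEqual/kNotEqual).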
6735 }
6736}
6737
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006738HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
6739 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006740 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006741 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006742 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00006743 case HLoadString::LoadKind::kBssEntry:
Vladimir Marko764d4542017-05-16 10:31:41 +01006744 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006745 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006746 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006747 case HLoadString::LoadKind::kJitTableAddress:
6748 DCHECK(Runtime::Current()->UseJitCompilation());
6749 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006750 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006751 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006752 }
6753 return desired_string_load_kind;
6754}
6755
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006756void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006757 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006758 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006759 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006760 if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006761 load_kind == HLoadString::LoadKind::kBootImageRelRo ||
Vladimir Markoaad75c62016-10-03 08:46:48 +00006762 load_kind == HLoadString::LoadKind::kBssEntry) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006763 locations->SetInAt(0, Location::RequiresRegister());
6764 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006765 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006766 locations->SetOut(Location::RegisterLocation(EAX));
6767 } else {
6768 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006769 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6770 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00006771 // Rely on the pResolveString to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006772 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006773 } else {
6774 // For non-Baker read barrier we have a temp-clobbering call.
6775 }
6776 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006777 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006778}
6779
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006780Label* CodeGeneratorX86::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006781 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006782 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006783 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006784 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006785 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006786 PatchInfo<Label>* info = &jit_string_patches_.back();
6787 return &info->label;
6788}
6789
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006790// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6791// move.
6792void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01006793 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006794 Location out_loc = locations->Out();
6795 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006796
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006797 switch (load->GetLoadKind()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006798 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006799 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006800 Register method_address = locations->InAt(0).AsRegister<Register>();
6801 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006802 codegen_->RecordBootImageStringPatch(load);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006803 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006804 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006805 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006806 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6807 Register method_address = locations->InAt(0).AsRegister<Register>();
6808 __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006809 codegen_->RecordBootImageRelRoPatch(load->InputAt(0)->AsX86ComputeBaseMethodAddress(),
6810 codegen_->GetBootImageOffset(load));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006811 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006812 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00006813 case HLoadString::LoadKind::kBssEntry: {
6814 Register method_address = locations->InAt(0).AsRegister<Register>();
6815 Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6816 Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006817 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006818 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Markod5fd5c32019-07-02 14:46:32 +01006819 // No need for memory fence, thanks to the x86 memory model.
Vladimir Marko174b2e22017-10-12 13:34:49 +01006820 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00006821 codegen_->AddSlowPath(slow_path);
6822 __ testl(out, out);
6823 __ j(kEqual, slow_path->GetEntryLabel());
6824 __ Bind(slow_path->GetExitLabel());
6825 return;
6826 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006827 case HLoadString::LoadKind::kJitBootImageAddress: {
6828 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
6829 DCHECK_NE(address, 0u);
6830 __ movl(out, Immediate(address));
6831 return;
6832 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006833 case HLoadString::LoadKind::kJitTableAddress: {
6834 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6835 Label* fixup_label = codegen_->NewJitRootStringPatch(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006836 load->GetDexFile(), load->GetStringIndex(), load->GetString());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006837 // /* GcRoot<mirror::String> */ out = *address
6838 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
6839 return;
6840 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006841 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006842 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006843 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006844
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006845 // TODO: Re-add the compiler code to do string dex cache lookup again.
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006846 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006847 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006848 __ movl(calling_convention.GetRegisterAt(0), Immediate(load->GetStringIndex().index_));
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006849 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6850 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006851}
6852
David Brazdilcb1c0552015-08-04 16:22:25 +01006853static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006854 return Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01006855}
6856
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006857void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
6858 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006859 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006860 locations->SetOut(Location::RequiresRegister());
6861}
6862
6863void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01006864 __ fs()->movl(load->GetLocations()->Out().AsRegister<Register>(), GetExceptionTlsAddress());
6865}
6866
6867void LocationsBuilderX86::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006868 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01006869}
6870
6871void InstructionCodeGeneratorX86::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
6872 __ fs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006873}
6874
6875void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006876 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6877 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006878 InvokeRuntimeCallingConvention calling_convention;
6879 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6880}
6881
6882void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006883 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006884 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006885}
6886
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006887// Temp is used for read barrier.
6888static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6889 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006890 !kUseBakerReadBarrier &&
6891 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain7c1559a2015-12-15 10:55:36 +00006892 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006893 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6894 return 1;
6895 }
6896 return 0;
6897}
6898
// The interface case has two temps: one holds the number of interfaces and one the current
// interface pointer; the current interface is compared in memory.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006901// The other checks have one temp for loading the object's class.
6902static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006903 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006904 return 2;
6905 }
6906 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006907}
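
// Illustrative summary of the two helpers above (it mirrors their logic, nothing more):
//   InstanceOf: one temp, and only when a slow-path (non-Baker) read barrier is needed for the
//               abstract-class, class-hierarchy or array-object walks; otherwise zero.
//   CheckCast:  two temps for kInterfaceCheck (class/iftable pointer plus the loop counter);
//               otherwise one temp plus whatever InstanceOf would need.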
6908
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006909void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006910 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006911 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01006912 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006913 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006914 case TypeCheckKind::kExactCheck:
6915 case TypeCheckKind::kAbstractClassCheck:
6916 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00006917 case TypeCheckKind::kArrayObjectCheck: {
6918 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
6919 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
6920 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006921 break;
Vladimir Marko87584542017-12-12 17:47:52 +00006922 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006923 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006924 case TypeCheckKind::kUnresolvedCheck:
6925 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006926 call_kind = LocationSummary::kCallOnSlowPath;
6927 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006928 case TypeCheckKind::kBitstringCheck:
6929 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006930 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006931
Vladimir Markoca6fff82017-10-03 14:49:14 +01006932 LocationSummary* locations =
6933 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01006934 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006935 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006936 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006937 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00006938 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
6939 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
6940 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
6941 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
6942 } else {
6943 locations->SetInAt(1, Location::Any());
6944 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006945 // Note that TypeCheckSlowPathX86 uses this "out" register too.
6946 locations->SetOut(Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006947 // When read barriers are enabled, we need a temporary register for some cases.
6948 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006949}
6950
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006951void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006952 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006953 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006954 Location obj_loc = locations->InAt(0);
6955 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006956 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006957 Location out_loc = locations->Out();
6958 Register out = out_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006959 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6960 DCHECK_LE(num_temps, 1u);
6961 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006962 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006963 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6964 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6965 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07006966 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006967 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006968
6969 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006970 // Avoid null check if we know obj is not null.
6971 if (instruction->MustDoNullCheck()) {
6972 __ testl(obj, obj);
6973 __ j(kEqual, &zero);
6974 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006975
Roland Levillain7c1559a2015-12-15 10:55:36 +00006976 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006977 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006978 ReadBarrierOption read_barrier_option =
6979 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006980 // /* HeapReference<Class> */ out = obj->klass_
6981 GenerateReferenceLoadTwoRegisters(instruction,
6982 out_loc,
6983 obj_loc,
6984 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006985 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006986 if (cls.IsRegister()) {
6987 __ cmpl(out, cls.AsRegister<Register>());
6988 } else {
6989 DCHECK(cls.IsStackSlot()) << cls;
6990 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6991 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006992
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006993 // Classes must be equal for the instanceof to succeed.
6994 __ j(kNotEqual, &zero);
6995 __ movl(out, Immediate(1));
6996 __ jmp(&done);
6997 break;
6998 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006999
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007000 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007001 ReadBarrierOption read_barrier_option =
7002 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007003 // /* HeapReference<Class> */ out = obj->klass_
7004 GenerateReferenceLoadTwoRegisters(instruction,
7005 out_loc,
7006 obj_loc,
7007 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007008 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007009 // If the class is abstract, we eagerly fetch the super class of the
7010 // object to avoid doing a comparison we know will fail.
7011 NearLabel loop;
7012 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007013 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007014 GenerateReferenceLoadOneRegister(instruction,
7015 out_loc,
7016 super_offset,
7017 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007018 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007019 __ testl(out, out);
7020 // If `out` is null, we use it for the result, and jump to `done`.
7021 __ j(kEqual, &done);
7022 if (cls.IsRegister()) {
7023 __ cmpl(out, cls.AsRegister<Register>());
7024 } else {
7025 DCHECK(cls.IsStackSlot()) << cls;
7026 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7027 }
7028 __ j(kNotEqual, &loop);
7029 __ movl(out, Immediate(1));
7030 if (zero.IsLinked()) {
7031 __ jmp(&done);
7032 }
7033 break;
7034 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007035
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007036 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007037 ReadBarrierOption read_barrier_option =
7038 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007039 // /* HeapReference<Class> */ out = obj->klass_
7040 GenerateReferenceLoadTwoRegisters(instruction,
7041 out_loc,
7042 obj_loc,
7043 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007044 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007045 // Walk over the class hierarchy to find a match.
7046 NearLabel loop, success;
7047 __ Bind(&loop);
7048 if (cls.IsRegister()) {
7049 __ cmpl(out, cls.AsRegister<Register>());
7050 } else {
7051 DCHECK(cls.IsStackSlot()) << cls;
7052 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7053 }
7054 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007055 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007056 GenerateReferenceLoadOneRegister(instruction,
7057 out_loc,
7058 super_offset,
7059 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007060 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007061 __ testl(out, out);
7062 __ j(kNotEqual, &loop);
7063 // If `out` is null, we use it for the result, and jump to `done`.
7064 __ jmp(&done);
7065 __ Bind(&success);
7066 __ movl(out, Immediate(1));
7067 if (zero.IsLinked()) {
7068 __ jmp(&done);
7069 }
7070 break;
7071 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007072
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007073 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00007074 ReadBarrierOption read_barrier_option =
7075 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007076 // /* HeapReference<Class> */ out = obj->klass_
7077 GenerateReferenceLoadTwoRegisters(instruction,
7078 out_loc,
7079 obj_loc,
7080 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00007081 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007082 // Do an exact check.
7083 NearLabel exact_check;
7084 if (cls.IsRegister()) {
7085 __ cmpl(out, cls.AsRegister<Register>());
7086 } else {
7087 DCHECK(cls.IsStackSlot()) << cls;
7088 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7089 }
7090 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007091 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007092 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007093 GenerateReferenceLoadOneRegister(instruction,
7094 out_loc,
7095 component_offset,
7096 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00007097 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007098 __ testl(out, out);
7099 // If `out` is null, we use it for the result, and jump to `done`.
7100 __ j(kEqual, &done);
7101 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
7102 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007103 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007104 __ movl(out, Immediate(1));
7105 __ jmp(&done);
7106 break;
7107 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007108
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007109 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007110 // No read barrier since the slow path will retry upon failure.
7111 // /* HeapReference<Class> */ out = obj->klass_
7112 GenerateReferenceLoadTwoRegisters(instruction,
7113 out_loc,
7114 obj_loc,
7115 class_offset,
7116 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007117 if (cls.IsRegister()) {
7118 __ cmpl(out, cls.AsRegister<Register>());
7119 } else {
7120 DCHECK(cls.IsStackSlot()) << cls;
7121 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
7122 }
7123 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007124 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007125 instruction, /* is_fatal= */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007126 codegen_->AddSlowPath(slow_path);
7127 __ j(kNotEqual, slow_path->GetEntryLabel());
7128 __ movl(out, Immediate(1));
7129 if (zero.IsLinked()) {
7130 __ jmp(&done);
7131 }
7132 break;
7133 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007134
Calin Juravle98893e12015-10-02 21:05:03 +01007135 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00007136 case TypeCheckKind::kInterfaceCheck: {
7137 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007138 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00007139 // cases.
7140 //
7141 // We cannot directly call the InstanceofNonTrivial runtime
7142 // entry point without resorting to a type checking slow path
7143 // here (i.e. by calling InvokeRuntime directly), as it would
      // require assigning fixed registers for the inputs of this
7145 // HInstanceOf instruction (following the runtime calling
7146 // convention), which might be cluttered by the potential first
7147 // read barrier emission at the beginning of this method.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007148 //
7149 // TODO: Introduce a new runtime entry point taking the object
7150 // to test (instead of its class) as argument, and let it deal
7151 // with the read barrier issues. This will let us refactor this
7152 // case of the `switch` code as it was previously (with a direct
7153 // call to the runtime not using a type checking slow path).
7154 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007155 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01007156 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007157 instruction, /* is_fatal= */ false);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007158 codegen_->AddSlowPath(slow_path);
7159 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007160 if (zero.IsLinked()) {
7161 __ jmp(&done);
7162 }
7163 break;
7164 }
Vladimir Marko175e7862018-03-27 09:03:13 +00007165
7166 case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
7168 GenerateReferenceLoadTwoRegisters(instruction,
7169 out_loc,
7170 obj_loc,
7171 class_offset,
7172 kWithoutReadBarrier);
7173
7174 GenerateBitstringTypeCheckCompare(instruction, out);
7175 __ j(kNotEqual, &zero);
7176 __ movl(out, Immediate(1));
7177 __ jmp(&done);
7178 break;
7179 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007180 }
7181
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007182 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007183 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007184 __ xorl(out, out);
7185 }
7186
7187 if (done.IsLinked()) {
7188 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007189 }
7190
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007191 if (slow_path != nullptr) {
7192 __ Bind(slow_path->GetExitLabel());
7193 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007194}
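
// For illustration, a rough sketch of the code shape emitted above for kExactCheck (symbolic
// registers and labels; heap poisoning and read barriers are ignored):
//     testl obj, obj                    // only if MustDoNullCheck()
//     je    zero
//     movl  out, [obj + class_offset]   // out = obj->klass_
//     cmpl  out, cls                    // cls may also live on the stack
//     jne   zero
//     movl  out, 1
//     jmp   done
//   zero:
//     xorl  out, out
//   done: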
7195
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007196void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007197 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00007198 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01007199 LocationSummary* locations =
7200 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007201 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007202 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
7203 // Require a register for the interface check since there is a loop that compares the class to
7204 // a memory address.
7205 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00007206 } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
7207 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
7208 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
7209 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007210 } else {
7211 locations->SetInAt(1, Location::Any());
7212 }
Vladimir Marko9f8d3122018-04-06 13:47:59 +01007213 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007214 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
7215}
7216
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007217void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007218 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007219 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00007220 Location obj_loc = locations->InAt(0);
7221 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007222 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007223 Location temp_loc = locations->GetTemp(0);
7224 Register temp = temp_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007225 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
7226 DCHECK_GE(num_temps, 1u);
7227 DCHECK_LE(num_temps, 2u);
7228 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
7229 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
7230 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7231 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7232 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
7233 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
7234 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
7235 const uint32_t object_array_data_offset =
7236 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007237
Vladimir Marko87584542017-12-12 17:47:52 +00007238 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007239 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007240 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86(
7241 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007242 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007243
Roland Levillain0d5a2812015-11-13 10:07:31 +00007244 NearLabel done;
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007245 // Avoid null check if we know obj is not null.
7246 if (instruction->MustDoNullCheck()) {
7247 __ testl(obj, obj);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007248 __ j(kEqual, &done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007249 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007250
Roland Levillain0d5a2812015-11-13 10:07:31 +00007251 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007252 case TypeCheckKind::kExactCheck:
7253 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007254 // /* HeapReference<Class> */ temp = obj->klass_
7255 GenerateReferenceLoadTwoRegisters(instruction,
7256 temp_loc,
7257 obj_loc,
7258 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007259 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007260
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007261 if (cls.IsRegister()) {
7262 __ cmpl(temp, cls.AsRegister<Register>());
7263 } else {
7264 DCHECK(cls.IsStackSlot()) << cls;
7265 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7266 }
7267 // Jump to slow path for throwing the exception or doing a
7268 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007269 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007270 break;
7271 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007272
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007273 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007274 // /* HeapReference<Class> */ temp = obj->klass_
7275 GenerateReferenceLoadTwoRegisters(instruction,
7276 temp_loc,
7277 obj_loc,
7278 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007279 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007280
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007281 // If the class is abstract, we eagerly fetch the super class of the
7282 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007283 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007284 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007285 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007286 GenerateReferenceLoadOneRegister(instruction,
7287 temp_loc,
7288 super_offset,
7289 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007290 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007291
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007292 // If the class reference currently in `temp` is null, jump to the slow path to throw the
7293 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007294 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007295 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00007296
      // Otherwise, compare the classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007298 if (cls.IsRegister()) {
7299 __ cmpl(temp, cls.AsRegister<Register>());
7300 } else {
7301 DCHECK(cls.IsStackSlot()) << cls;
7302 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7303 }
7304 __ j(kNotEqual, &loop);
7305 break;
7306 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007307
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007308 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007309 // /* HeapReference<Class> */ temp = obj->klass_
7310 GenerateReferenceLoadTwoRegisters(instruction,
7311 temp_loc,
7312 obj_loc,
7313 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007314 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007315
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007316 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007317 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007318 __ Bind(&loop);
7319 if (cls.IsRegister()) {
7320 __ cmpl(temp, cls.AsRegister<Register>());
7321 } else {
7322 DCHECK(cls.IsStackSlot()) << cls;
7323 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7324 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007325 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007326
Roland Levillain0d5a2812015-11-13 10:07:31 +00007327 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007328 GenerateReferenceLoadOneRegister(instruction,
7329 temp_loc,
7330 super_offset,
7331 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007332 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007333
7334 // If the class reference currently in `temp` is not null, jump
      // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007336 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007337 __ j(kNotZero, &loop);
      // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007339 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007340 break;
7341 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007342
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007343 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007344 // /* HeapReference<Class> */ temp = obj->klass_
7345 GenerateReferenceLoadTwoRegisters(instruction,
7346 temp_loc,
7347 obj_loc,
7348 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007349 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007350
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007351 // Do an exact check.
7352 if (cls.IsRegister()) {
7353 __ cmpl(temp, cls.AsRegister<Register>());
7354 } else {
7355 DCHECK(cls.IsStackSlot()) << cls;
7356 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
7357 }
7358 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007359
7360 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007361 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007362 GenerateReferenceLoadOneRegister(instruction,
7363 temp_loc,
7364 component_offset,
7365 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007366 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007367
      // If the component type is null (i.e. the object is not an array), jump to the slow path
      // to throw the exception. Otherwise, proceed with the check.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007370 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007371 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00007372
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007373 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007374 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007375 break;
7376 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00007377
Calin Juravle98893e12015-10-02 21:05:03 +01007378 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007379 // We always go into the type check slow path for the unresolved check case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00007380 // We cannot directly call the CheckCast runtime entry point
7381 // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require
      // assigning fixed registers for the inputs of this HCheckCast
7384 // instruction (following the runtime calling convention), which
7385 // might be cluttered by the potential first read barrier
7386 // emission at the beginning of this method.
7387 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007388 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007389
7390 case TypeCheckKind::kInterfaceCheck: {
      // Fast path for the interface check. Try to avoid read barriers to improve the fast path;
      // doing so cannot produce false positives.
7393 // /* HeapReference<Class> */ temp = obj->klass_
7394 GenerateReferenceLoadTwoRegisters(instruction,
7395 temp_loc,
7396 obj_loc,
7397 class_offset,
7398 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007399
Vladimir Markoe619f6c2017-12-12 16:00:01 +00007400 // /* HeapReference<Class> */ temp = temp->iftable_
7401 GenerateReferenceLoadTwoRegisters(instruction,
7402 temp_loc,
7403 temp_loc,
7404 iftable_offset,
7405 kWithoutReadBarrier);
7406 // Iftable is never null.
7407 __ movl(maybe_temp2_loc.AsRegister<Register>(), Address(temp, array_length_offset));
      // Maybe poison `cls` for direct comparison with memory.
7409 __ MaybePoisonHeapReference(cls.AsRegister<Register>());
7410 // Loop through the iftable and check if any class matches.
7411 NearLabel start_loop;
7412 __ Bind(&start_loop);
7413 // Need to subtract first to handle the empty array case.
7414 __ subl(maybe_temp2_loc.AsRegister<Register>(), Immediate(2));
7415 __ j(kNegative, type_check_slow_path->GetEntryLabel());
7416 // Go to next interface if the classes do not match.
7417 __ cmpl(cls.AsRegister<Register>(),
7418 CodeGeneratorX86::ArrayAddress(temp,
7419 maybe_temp2_loc,
7420 TIMES_4,
7421 object_array_data_offset));
7422 __ j(kNotEqual, &start_loop);
7423 // If `cls` was poisoned above, unpoison it.
7424 __ MaybeUnpoisonHeapReference(cls.AsRegister<Register>());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007425 break;
7426 }
Vladimir Marko175e7862018-03-27 09:03:13 +00007427
7428 case TypeCheckKind::kBitstringCheck: {
7429 // /* HeapReference<Class> */ temp = obj->klass_
7430 GenerateReferenceLoadTwoRegisters(instruction,
7431 temp_loc,
7432 obj_loc,
7433 class_offset,
7434 kWithoutReadBarrier);
7435
7436 GenerateBitstringTypeCheckCompare(instruction, temp);
7437 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
7438 break;
7439 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007440 }
7441 __ Bind(&done);
7442
Roland Levillain0d5a2812015-11-13 10:07:31 +00007443 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007444}
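
// For illustration, pseudocode for the kInterfaceCheck fast path above (iftable entries come in
// (interface class, method array) pairs, hence the stride of two; names are symbolic):
//   temp  = obj->klass_;                  // loaded without read barriers: a false negative only
//   temp  = temp->iftable_;               // means we take the slow path, never a false positive
//   count = temp->length_;
//   for (;;) {
//     count -= 2;
//     if (count < 0) goto slow_path;      // not implemented: the slow path re-checks and throws
//     if (temp[count] == cls) break;      // interface found, the cast succeeds
//   }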
7445
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007446void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007447 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
7448 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007449 InvokeRuntimeCallingConvention calling_convention;
7450 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7451}
7452
7453void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01007454 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject
7455 : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01007456 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01007457 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00007458 if (instruction->IsEnter()) {
7459 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7460 } else {
7461 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7462 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007463}
7464
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05307465void LocationsBuilderX86::VisitX86AndNot(HX86AndNot* instruction) {
7466 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7467 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
7468 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7469 locations->SetInAt(0, Location::RequiresRegister());
7470 locations->SetInAt(1, Location::RequiresRegister());
7471 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7472}
7473
7474void InstructionCodeGeneratorX86::VisitX86AndNot(HX86AndNot* instruction) {
7475 LocationSummary* locations = instruction->GetLocations();
7476 Location first = locations->InAt(0);
7477 Location second = locations->InAt(1);
7478 Location dest = locations->Out();
7479 if (instruction->GetResultType() == DataType::Type::kInt32) {
7480 __ andn(dest.AsRegister<Register>(),
7481 first.AsRegister<Register>(),
7482 second.AsRegister<Register>());
7483 } else {
7484 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
7485 __ andn(dest.AsRegisterPairLow<Register>(),
7486 first.AsRegisterPairLow<Register>(),
7487 second.AsRegisterPairLow<Register>());
7488 __ andn(dest.AsRegisterPairHigh<Register>(),
7489 first.AsRegisterPairHigh<Register>(),
7490 second.AsRegisterPairHigh<Register>());
7491 }
7492}
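
// For reference, a sketch of what the BMI1 ANDN above computes, assuming the assembler keeps the
// encoding's operand order (andn(dst, src1, src2) => dst = ~src1 & src2):
//   int32: dest    = ~first    & second
//   int64: dest.lo = ~first.lo & second.lo;  dest.hi = ~first.hi & second.hi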
7493
7494void LocationsBuilderX86::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
7495 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
7496 DCHECK(instruction->GetType() == DataType::Type::kInt32) << instruction->GetType();
7497 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
7498 locations->SetInAt(0, Location::RequiresRegister());
7499 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7500}
7501
7502void InstructionCodeGeneratorX86::VisitX86MaskOrResetLeastSetBit(
7503 HX86MaskOrResetLeastSetBit* instruction) {
7504 LocationSummary* locations = instruction->GetLocations();
7505 Location src = locations->InAt(0);
7506 Location dest = locations->Out();
7507 DCHECK(instruction->GetResultType() == DataType::Type::kInt32);
7508 switch (instruction->GetOpKind()) {
7509 case HInstruction::kAnd:
7510 __ blsr(dest.AsRegister<Register>(), src.AsRegister<Register>());
7511 break;
7512 case HInstruction::kXor:
7513 __ blsmsk(dest.AsRegister<Register>(), src.AsRegister<Register>());
7514 break;
7515 default:
7516 LOG(FATAL) << "Unreachable";
7517 }
7518}
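
// For reference, the BMI1 identities behind the two cases above:
//   blsr(x)   = x & (x - 1)   // kAnd: reset the lowest set bit
//   blsmsk(x) = x ^ (x - 1)   // kXor: mask up to and including the lowest set bit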
7519
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007520void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
7521void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
7522void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
7523
7524void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
7525 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007526 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007527 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
7528 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007529 locations->SetInAt(0, Location::RequiresRegister());
7530 locations->SetInAt(1, Location::Any());
7531 locations->SetOut(Location::SameAsFirstInput());
7532}
7533
7534void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
7535 HandleBitwiseOperation(instruction);
7536}
7537
7538void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
7539 HandleBitwiseOperation(instruction);
7540}
7541
7542void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
7543 HandleBitwiseOperation(instruction);
7544}
7545
7546void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
7547 LocationSummary* locations = instruction->GetLocations();
7548 Location first = locations->InAt(0);
7549 Location second = locations->InAt(1);
7550 DCHECK(first.Equals(locations->Out()));
7551
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007552 if (instruction->GetResultType() == DataType::Type::kInt32) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007553 if (second.IsRegister()) {
7554 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007555 __ andl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007556 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007557 __ orl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007558 } else {
7559 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00007560 __ xorl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007561 }
7562 } else if (second.IsConstant()) {
7563 if (instruction->IsAnd()) {
Roland Levillain199f3362014-11-27 17:15:16 +00007564 __ andl(first.AsRegister<Register>(),
7565 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007566 } else if (instruction->IsOr()) {
Roland Levillain199f3362014-11-27 17:15:16 +00007567 __ orl(first.AsRegister<Register>(),
7568 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007569 } else {
7570 DCHECK(instruction->IsXor());
Roland Levillain199f3362014-11-27 17:15:16 +00007571 __ xorl(first.AsRegister<Register>(),
7572 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007573 }
7574 } else {
7575 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007576 __ andl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007577 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007578 __ orl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007579 } else {
7580 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00007581 __ xorl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007582 }
7583 }
7584 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007585 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007586 if (second.IsRegisterPair()) {
7587 if (instruction->IsAnd()) {
7588 __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7589 __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7590 } else if (instruction->IsOr()) {
7591 __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7592 __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7593 } else {
7594 DCHECK(instruction->IsXor());
7595 __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7596 __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7597 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007598 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007599 if (instruction->IsAnd()) {
7600 __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7601 __ andl(first.AsRegisterPairHigh<Register>(),
7602 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7603 } else if (instruction->IsOr()) {
7604 __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7605 __ orl(first.AsRegisterPairHigh<Register>(),
7606 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7607 } else {
7608 DCHECK(instruction->IsXor());
7609 __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7610 __ xorl(first.AsRegisterPairHigh<Register>(),
7611 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7612 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007613 } else {
7614 DCHECK(second.IsConstant()) << second;
7615 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007616 int32_t low_value = Low32Bits(value);
7617 int32_t high_value = High32Bits(value);
7618 Immediate low(low_value);
7619 Immediate high(high_value);
7620 Register first_low = first.AsRegisterPairLow<Register>();
7621 Register first_high = first.AsRegisterPairHigh<Register>();
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007622 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007623 if (low_value == 0) {
7624 __ xorl(first_low, first_low);
7625 } else if (low_value != -1) {
7626 __ andl(first_low, low);
7627 }
7628 if (high_value == 0) {
7629 __ xorl(first_high, first_high);
7630 } else if (high_value != -1) {
7631 __ andl(first_high, high);
7632 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007633 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007634 if (low_value != 0) {
7635 __ orl(first_low, low);
7636 }
7637 if (high_value != 0) {
7638 __ orl(first_high, high);
7639 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007640 } else {
7641 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007642 if (low_value != 0) {
7643 __ xorl(first_low, low);
7644 }
7645 if (high_value != 0) {
7646 __ xorl(first_high, high);
7647 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007648 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007649 }
7650 }
7651}
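
// Worked example of the int64 constant path above, for `first &= 0x00000000FFFFFFFF`:
//   low_value  = 0xFFFFFFFF (-1)  -> nothing is emitted for the low word
//   high_value = 0x00000000       -> xorl first_high, first_high
// so the whole operation costs a single xorl.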
7652
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007653void InstructionCodeGeneratorX86::GenerateReferenceLoadOneRegister(
7654 HInstruction* instruction,
7655 Location out,
7656 uint32_t offset,
7657 Location maybe_temp,
7658 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007659 Register out_reg = out.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007660 if (read_barrier_option == kWithReadBarrier) {
7661 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007662 if (kUseBakerReadBarrier) {
7663 // Load with fast path based Baker's read barrier.
7664 // /* HeapReference<Object> */ out = *(out + offset)
7665 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007666 instruction, out, out_reg, offset, /* needs_null_check= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007667 } else {
7668 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007669 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain7c1559a2015-12-15 10:55:36 +00007670 // in the following move operation, as we will need it for the
7671 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00007672 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007673 __ movl(maybe_temp.AsRegister<Register>(), out_reg);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007674 // /* HeapReference<Object> */ out = *(out + offset)
7675 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007676 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007677 }
7678 } else {
7679 // Plain load with no read barrier.
7680 // /* HeapReference<Object> */ out = *(out + offset)
7681 __ movl(out_reg, Address(out_reg, offset));
7682 __ MaybeUnpoisonHeapReference(out_reg);
7683 }
7684}
7685
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007686void InstructionCodeGeneratorX86::GenerateReferenceLoadTwoRegisters(
7687 HInstruction* instruction,
7688 Location out,
7689 Location obj,
7690 uint32_t offset,
7691 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007692 Register out_reg = out.AsRegister<Register>();
7693 Register obj_reg = obj.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007694 if (read_barrier_option == kWithReadBarrier) {
7695 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007696 if (kUseBakerReadBarrier) {
7697 // Load with fast path based Baker's read barrier.
7698 // /* HeapReference<Object> */ out = *(obj + offset)
7699 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007700 instruction, out, obj_reg, offset, /* needs_null_check= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007701 } else {
7702 // Load with slow path based read barrier.
7703 // /* HeapReference<Object> */ out = *(obj + offset)
7704 __ movl(out_reg, Address(obj_reg, offset));
7705 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
7706 }
7707 } else {
7708 // Plain load with no read barrier.
7709 // /* HeapReference<Object> */ out = *(obj + offset)
7710 __ movl(out_reg, Address(obj_reg, offset));
7711 __ MaybeUnpoisonHeapReference(out_reg);
7712 }
7713}
7714
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007715void InstructionCodeGeneratorX86::GenerateGcRootFieldLoad(
7716 HInstruction* instruction,
7717 Location root,
7718 const Address& address,
7719 Label* fixup_label,
7720 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007721 Register root_reg = root.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007722 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007723 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007724 if (kUseBakerReadBarrier) {
7725 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
7727 //
Roland Levillaind966ce72017-02-09 16:20:14 +00007728 // root = obj.field;
7729 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
7730 // if (temp != null) {
7731 // root = temp(root)
Roland Levillain7c1559a2015-12-15 10:55:36 +00007732 // }
7733
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007734 // /* GcRoot<mirror::Object> */ root = *address
7735 __ movl(root_reg, address);
7736 if (fixup_label != nullptr) {
7737 __ Bind(fixup_label);
7738 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007739 static_assert(
7740 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
7741 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
7742 "have different sizes.");
7743 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
7744 "art::mirror::CompressedReference<mirror::Object> and int32_t "
7745 "have different sizes.");
7746
Vladimir Marko953437b2016-08-24 08:30:46 +00007747 // Slow path marking the GC root `root`.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007748 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007749 instruction, root, /* unpoison_ref_before_marking= */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007750 codegen_->AddSlowPath(slow_path);
7751
Roland Levillaind966ce72017-02-09 16:20:14 +00007752 // Test the entrypoint (`Thread::Current()->pReadBarrierMarkReg ## root.reg()`).
7753 const int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +01007754 Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(root.reg());
Roland Levillaind966ce72017-02-09 16:20:14 +00007755 __ fs()->cmpl(Address::Absolute(entry_point_offset), Immediate(0));
7756 // The entrypoint is null when the GC is not marking.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007757 __ j(kNotEqual, slow_path->GetEntryLabel());
7758 __ Bind(slow_path->GetExitLabel());
7759 } else {
7760 // GC root loaded through a slow path for read barriers other
7761 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007762 // /* GcRoot<mirror::Object>* */ root = address
7763 __ leal(root_reg, address);
7764 if (fixup_label != nullptr) {
7765 __ Bind(fixup_label);
7766 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007767 // /* mirror::Object* */ root = root->Read()
7768 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
7769 }
7770 } else {
7771 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007772 // /* GcRoot<mirror::Object> */ root = *address
7773 __ movl(root_reg, address);
7774 if (fixup_label != nullptr) {
7775 __ Bind(fixup_label);
7776 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007777 // Note that GC roots are not affected by heap poisoning, thus we
7778 // do not have to unpoison `root_reg` here.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007779 }
7780}
7781
7782void CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7783 Location ref,
7784 Register obj,
7785 uint32_t offset,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007786 bool needs_null_check) {
7787 DCHECK(kEmitCompilerReadBarrier);
7788 DCHECK(kUseBakerReadBarrier);
7789
7790 // /* HeapReference<Object> */ ref = *(obj + offset)
7791 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007792 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007793}
7794
7795void CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7796 Location ref,
7797 Register obj,
7798 uint32_t data_offset,
7799 Location index,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007800 bool needs_null_check) {
7801 DCHECK(kEmitCompilerReadBarrier);
7802 DCHECK(kUseBakerReadBarrier);
7803
Roland Levillain3d312422016-06-23 13:53:42 +01007804 static_assert(
7805 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7806 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00007807 // /* HeapReference<Object> */ ref =
7808 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007809 Address src = CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007810 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007811}
7812
7813void CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7814 Location ref,
7815 Register obj,
7816 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007817 bool needs_null_check,
7818 bool always_update_field,
7819 Register* temp) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007820 DCHECK(kEmitCompilerReadBarrier);
7821 DCHECK(kUseBakerReadBarrier);
7822
7823 // In slow path based read barriers, the read barrier call is
7824 // inserted after the original load. However, in fast path based
7825 // Baker's read barriers, we need to perform the load of
7826 // mirror::Object::monitor_ *before* the original reference load.
7827 // This load-load ordering is required by the read barrier.
7828 // The fast path/slow path (for Baker's algorithm) should look like:
7829 //
7830 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7831 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7832 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007833 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007834 // if (is_gray) {
7835 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7836 // }
7837 //
7838 // Note: the original implementation in ReadBarrier::Barrier is
7839 // slightly more complex as:
7840 // - it implements the load-load fence using a data dependency on
  //   the high bits of rb_state, which are expected to be all zeroes
7842 // (we use CodeGeneratorX86::GenerateMemoryBarrier instead here,
7843 // which is a no-op thanks to the x86 memory model);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007844 // - it performs additional checks that we do not do here for
7845 // performance reasons.
7846
7847 Register ref_reg = ref.AsRegister<Register>();
Roland Levillain7c1559a2015-12-15 10:55:36 +00007848 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7849
Vladimir Marko953437b2016-08-24 08:30:46 +00007850 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01007851 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007852 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007853 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7854 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7855 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
7856
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007857 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007858 // ref = ReadBarrier::Mark(ref);
7859 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7860 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain7c1559a2015-12-15 10:55:36 +00007861 if (needs_null_check) {
7862 MaybeRecordImplicitNullCheck(instruction);
7863 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007864
7865 // Load fence to prevent load-load reordering.
7866 // Note that this is a no-op, thanks to the x86 memory model.
7867 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7868
7869 // The actual reference load.
7870 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007871 __ movl(ref_reg, src); // Flags are unaffected.
7872
  // Note: Reference unpoisoning modifies the flags, so it must be delayed until after the branch.
7874 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007875 SlowPathCode* slow_path;
7876 if (always_update_field) {
7877 DCHECK(temp != nullptr);
Vladimir Marko174b2e22017-10-12 13:34:49 +01007878 slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007879 instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007880 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01007881 slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86(
Andreas Gampe3db70682018-12-26 15:12:03 -08007882 instruction, ref, /* unpoison_ref_before_marking= */ true);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007883 }
Vladimir Marko953437b2016-08-24 08:30:46 +00007884 AddSlowPath(slow_path);
7885
  // We have done the "if" of the gray bit check above; now branch based on the flags.
7887 __ j(kNotZero, slow_path->GetEntryLabel());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007888
7889 // Object* ref = ref_addr->AsMirrorPtr()
7890 __ MaybeUnpoisonHeapReference(ref_reg);
7891
Roland Levillain7c1559a2015-12-15 10:55:36 +00007892 __ Bind(slow_path->GetExitLabel());
7893}
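
// For illustration, the shape of the Baker fast path emitted above (symbolic names; the byte and
// bit are derived from LockWord::kReadBarrierStateShift as in the constants above):
//   testb [obj + monitor_offset + gray_byte_position], (1 << gray_bit_position)
//   // ...load fence (a no-op on x86)...
//   movl  ref, src                      // original reference load; flags are still live
//   jnz   slow_path                     // rb_state == GrayState(): mark `ref` via the runtime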
7894
7895void CodeGeneratorX86::GenerateReadBarrierSlow(HInstruction* instruction,
7896 Location out,
7897 Location ref,
7898 Location obj,
7899 uint32_t offset,
7900 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007901 DCHECK(kEmitCompilerReadBarrier);
7902
Roland Levillain7c1559a2015-12-15 10:55:36 +00007903 // Insert a slow path based read barrier *after* the reference load.
7904 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007905 // If heap poisoning is enabled, the unpoisoning of the loaded
7906 // reference will be carried out by the runtime within the slow
7907 // path.
7908 //
7909 // Note that `ref` currently does not get unpoisoned (when heap
7910 // poisoning is enabled), which is alright as the `ref` argument is
7911 // not used by the artReadBarrierSlow entry point.
7912 //
7913 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007914 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00007915 ReadBarrierForHeapReferenceSlowPathX86(instruction, out, ref, obj, offset, index);
7916 AddSlowPath(slow_path);
7917
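  // Unlike the Baker fast path above, this read barrier always goes through the runtime: jump
  // straight to the slow path and let it branch back to the exit label when done.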
Roland Levillain0d5a2812015-11-13 10:07:31 +00007918 __ jmp(slow_path->GetEntryLabel());
7919 __ Bind(slow_path->GetExitLabel());
7920}
7921
Roland Levillain7c1559a2015-12-15 10:55:36 +00007922void CodeGeneratorX86::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7923 Location out,
7924 Location ref,
7925 Location obj,
7926 uint32_t offset,
7927 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007928 if (kEmitCompilerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007929 // Baker's read barriers shall be handled by the fast path
7930 // (CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier).
7931 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007932 // If heap poisoning is enabled, unpoisoning will be taken care of
7933 // by the runtime within the slow path.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007934 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007935 } else if (kPoisonHeapReferences) {
7936 __ UnpoisonHeapReference(out.AsRegister<Register>());
7937 }
7938}
7939
Roland Levillain7c1559a2015-12-15 10:55:36 +00007940void CodeGeneratorX86::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7941 Location out,
7942 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007943 DCHECK(kEmitCompilerReadBarrier);
7944
Roland Levillain7c1559a2015-12-15 10:55:36 +00007945 // Insert a slow path based read barrier *after* the GC root load.
7946 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007947 // Note that GC roots are not affected by heap poisoning, so we do
7948 // not need to do anything special for this here.
7949 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007950 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007951 AddSlowPath(slow_path);
7952
Roland Levillain0d5a2812015-11-13 10:07:31 +00007953 __ jmp(slow_path->GetEntryLabel());
7954 __ Bind(slow_path->GetExitLabel());
7955}
7956
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007957void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007958 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007959 LOG(FATAL) << "Unreachable";
7960}
7961
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007962void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007963 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007964 LOG(FATAL) << "Unreachable";
7965}
7966
Mark Mendellfe57faa2015-09-18 09:26:15 -04007967// Simple implementation of packed switch - generate cascaded compare/jumps.
7968void LocationsBuilderX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7969 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007970 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007971 locations->SetInAt(0, Location::RequiresRegister());
7972}
7973
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007974void InstructionCodeGeneratorX86::GenPackedSwitchWithCompares(Register value_reg,
7975 int32_t lower_bound,
7976 uint32_t num_entries,
7977 HBasicBlock* switch_block,
7978 HBasicBlock* default_block) {
7979 // Figure out the correct compare values and jump conditions.
7980 // Handle the first compare/branch as a special case because it might
7981 // jump to the default case.
7982 DCHECK_GT(num_entries, 2u);
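  // For example, with lower_bound == 10 and three cases {10, 11, 12}, the emitted cascade is
  // roughly (labels illustrative):
  //   cmp value, 10 ; jl default ; je case_10
  //   cmp value, 12 ; jl case_11 ; je case_12
  //   jmp default                                (omitted when the default block comes next)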
7983 Condition first_condition;
7984 uint32_t index;
7985 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
7986 if (lower_bound != 0) {
7987 first_condition = kLess;
7988 __ cmpl(value_reg, Immediate(lower_bound));
7989 __ j(first_condition, codegen_->GetLabelOf(default_block));
7990 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007991
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007992 index = 1;
7993 } else {
7994 // Handle all the compare/jumps below.
7995 first_condition = kBelow;
7996 index = 0;
7997 }
7998
7999 // Handle the rest of the compare/jumps.
8000 for (; index + 1 < num_entries; index += 2) {
8001 int32_t compare_to_value = lower_bound + index + 1;
8002 __ cmpl(value_reg, Immediate(compare_to_value));
8003    // Jump to successors[index] if value == case_value[index] (compared against case_value[index + 1]).
8004 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
8005 // Jump to successors[index + 1] if value == case_value[index + 1].
8006 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
8007 }
8008
8009 if (index != num_entries) {
8010    // There is an odd number of entries. Handle the last one.
8011 DCHECK_EQ(index + 1, num_entries);
8012 __ cmpl(value_reg, Immediate(lower_bound + index));
8013 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04008014 }
8015
8016 // And the default for any other value.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008017 if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
8018 __ jmp(codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04008019 }
8020}
8021
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008022void InstructionCodeGeneratorX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8023 int32_t lower_bound = switch_instr->GetStartValue();
8024 uint32_t num_entries = switch_instr->GetNumEntries();
8025 LocationSummary* locations = switch_instr->GetLocations();
8026 Register value_reg = locations->InAt(0).AsRegister<Register>();
8027
8028 GenPackedSwitchWithCompares(value_reg,
8029 lower_bound,
8030 num_entries,
8031 switch_instr->GetBlock(),
8032 switch_instr->GetDefaultBlock());
8033}
8034
Mark Mendell805b3b52015-09-18 14:10:29 -04008035void LocationsBuilderX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
8036 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008037 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendell805b3b52015-09-18 14:10:29 -04008038 locations->SetInAt(0, Location::RequiresRegister());
8039
8040 // Constant area pointer.
8041 locations->SetInAt(1, Location::RequiresRegister());
8042
8043 // And the temporary we need.
8044 locations->AddTemp(Location::RequiresRegister());
8045}
8046
8047void InstructionCodeGeneratorX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
8048 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008049 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendell805b3b52015-09-18 14:10:29 -04008050 LocationSummary* locations = switch_instr->GetLocations();
8051 Register value_reg = locations->InAt(0).AsRegister<Register>();
8052 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
8053
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008054 if (num_entries <= kPackedSwitchJumpTableThreshold) {
8055 GenPackedSwitchWithCompares(value_reg,
8056 lower_bound,
8057 num_entries,
8058 switch_instr->GetBlock(),
8059 default_block);
8060 return;
8061 }
8062
Mark Mendell805b3b52015-09-18 14:10:29 -04008063  // Otherwise, dispatch through a jump table stored in the constant area.
8064 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
8065 Register constant_area = locations->InAt(1).AsRegister<Register>();
8066
8067 // Remove the bias, if needed.
8068 if (lower_bound != 0) {
8069 __ leal(temp_reg, Address(value_reg, -lower_bound));
8070 value_reg = temp_reg;
8071 }
8072
8073 // Is the value in range?
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008074 DCHECK_GE(num_entries, 1u);
Mark Mendell805b3b52015-09-18 14:10:29 -04008075 __ cmpl(value_reg, Immediate(num_entries - 1));
8076 __ j(kAbove, codegen_->GetLabelOf(default_block));
8077
8078 // We are in the range of the table.
8079 // Load (target-constant_area) from the jump table, indexing by the value.
8080 __ movl(temp_reg, codegen_->LiteralCaseTable(switch_instr, constant_area, value_reg));
8081
8082 // Compute the actual target address by adding in constant_area.
8083 __ addl(temp_reg, constant_area);
8084
8085 // And jump.
8086 __ jmp(temp_reg);
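  // In effect, the dispatch emitted above is roughly (base = constant area / method address
  // register):
  //   lea  temp, [value - lower_bound]          (only when lower_bound != 0)
  //   cmp  value, num_entries - 1
  //   ja   default
  //   mov  temp, [base + value*4 + table_off]   // entry = target - base method address
  //   add  temp, base
  //   jmp  temp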
8087}
8088
Mark Mendell0616ae02015-04-17 12:49:27 -04008089void LocationsBuilderX86::VisitX86ComputeBaseMethodAddress(
8090 HX86ComputeBaseMethodAddress* insn) {
8091 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008092 new (GetGraph()->GetAllocator()) LocationSummary(insn, LocationSummary::kNoCall);
Mark Mendell0616ae02015-04-17 12:49:27 -04008093 locations->SetOut(Location::RequiresRegister());
8094}
8095
8096void InstructionCodeGeneratorX86::VisitX86ComputeBaseMethodAddress(
8097 HX86ComputeBaseMethodAddress* insn) {
8098 LocationSummary* locations = insn->GetLocations();
8099 Register reg = locations->Out().AsRegister<Register>();
8100
8101 // Generate call to next instruction.
8102 Label next_instruction;
8103 __ call(&next_instruction);
8104 __ Bind(&next_instruction);
8105
8106  // Remember this offset for later use with the constant area.
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008107 codegen_->AddMethodAddressOffset(insn, GetAssembler()->CodeSize());
Mark Mendell0616ae02015-04-17 12:49:27 -04008108
8109 // Grab the return address off the stack.
8110 __ popl(reg);
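  // In effect:
  //   call next   // pushes the address of 'next' (the popl below) as the return address
  //  next:
  //   pop  reg    // reg now holds that address
  // This call/pop pair is the usual way to materialize the current PC on 32-bit x86, which has no
  // RIP-relative addressing; the constant area is then addressed relative to this value.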
8111}
8112
8113void LocationsBuilderX86::VisitX86LoadFromConstantTable(
8114 HX86LoadFromConstantTable* insn) {
8115 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008116 new (GetGraph()->GetAllocator()) LocationSummary(insn, LocationSummary::kNoCall);
Mark Mendell0616ae02015-04-17 12:49:27 -04008117
8118 locations->SetInAt(0, Location::RequiresRegister());
8119 locations->SetInAt(1, Location::ConstantLocation(insn->GetConstant()));
8120
8121  // If the value does not need to be materialized, we only need the inputs to be set.
David Brazdilb3e773e2016-01-26 11:28:37 +00008122 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04008123 return;
8124 }
8125
8126 switch (insn->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008127 case DataType::Type::kFloat32:
8128 case DataType::Type::kFloat64:
Mark Mendell0616ae02015-04-17 12:49:27 -04008129 locations->SetOut(Location::RequiresFpuRegister());
8130 break;
8131
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008132 case DataType::Type::kInt32:
Mark Mendell0616ae02015-04-17 12:49:27 -04008133 locations->SetOut(Location::RequiresRegister());
8134 break;
8135
8136 default:
8137 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
8138 }
8139}
8140
8141void InstructionCodeGeneratorX86::VisitX86LoadFromConstantTable(HX86LoadFromConstantTable* insn) {
David Brazdilb3e773e2016-01-26 11:28:37 +00008142 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04008143 return;
8144 }
8145
8146 LocationSummary* locations = insn->GetLocations();
8147 Location out = locations->Out();
8148 Register const_area = locations->InAt(0).AsRegister<Register>();
8149 HConstant *value = insn->GetConstant();
8150
8151 switch (insn->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008152 case DataType::Type::kFloat32:
Mark Mendell0616ae02015-04-17 12:49:27 -04008153 __ movss(out.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008154 codegen_->LiteralFloatAddress(
8155 value->AsFloatConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04008156 break;
8157
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008158 case DataType::Type::kFloat64:
Mark Mendell0616ae02015-04-17 12:49:27 -04008159 __ movsd(out.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008160 codegen_->LiteralDoubleAddress(
8161 value->AsDoubleConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04008162 break;
8163
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008164 case DataType::Type::kInt32:
Mark Mendell0616ae02015-04-17 12:49:27 -04008165 __ movl(out.AsRegister<Register>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008166 codegen_->LiteralInt32Address(
8167 value->AsIntConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04008168 break;
8169
8170 default:
8171 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
8172 }
8173}
8174
Mark Mendell0616ae02015-04-17 12:49:27 -04008175/**
8176 * Class to handle late fixup of offsets into the constant area.
8177 */
Vladimir Marko5233f932015-09-29 19:01:15 +01008178class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
Mark Mendell0616ae02015-04-17 12:49:27 -04008179 public:
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008180 RIPFixup(CodeGeneratorX86& codegen,
8181 HX86ComputeBaseMethodAddress* base_method_address,
8182 size_t offset)
8183 : codegen_(&codegen),
8184 base_method_address_(base_method_address),
8185 offset_into_constant_area_(offset) {}
Mark Mendell805b3b52015-09-18 14:10:29 -04008186
8187 protected:
8188 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
8189
8190 CodeGeneratorX86* codegen_;
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008191 HX86ComputeBaseMethodAddress* base_method_address_;
Mark Mendell0616ae02015-04-17 12:49:27 -04008192
8193 private:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01008194 void Process(const MemoryRegion& region, int pos) override {
Mark Mendell0616ae02015-04-17 12:49:27 -04008195 // Patch the correct offset for the instruction. The place to patch is the
8196 // last 4 bytes of the instruction.
8197    // The value to patch is the distance from the address computed by the
8198    // HX86ComputeBaseMethodAddress instruction to the offset in the constant area.
Mark Mendell805b3b52015-09-18 14:10:29 -04008199 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008200 int32_t relative_position =
8201 constant_offset - codegen_->GetMethodAddressOffset(base_method_address_);
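    // At runtime the addressing register holds the absolute address that corresponds to
    // GetMethodAddressOffset(), so adding relative_position to it yields the absolute address of
    // the constant, wherever the code is loaded.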
Mark Mendell0616ae02015-04-17 12:49:27 -04008202
8203 // Patch in the right value.
8204 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
8205 }
8206
Mark Mendell0616ae02015-04-17 12:49:27 -04008207 // Location in constant area that the fixup refers to.
Mark Mendell805b3b52015-09-18 14:10:29 -04008208 int32_t offset_into_constant_area_;
Mark Mendell0616ae02015-04-17 12:49:27 -04008209};
8210
Mark Mendell805b3b52015-09-18 14:10:29 -04008211/**
8212 * Class to handle late fixup of offsets to a jump table that will be created in the
8213 * constant area.
8214 */
8215class JumpTableRIPFixup : public RIPFixup {
8216 public:
8217 JumpTableRIPFixup(CodeGeneratorX86& codegen, HX86PackedSwitch* switch_instr)
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008218 : RIPFixup(codegen, switch_instr->GetBaseMethodAddress(), static_cast<size_t>(-1)),
8219 switch_instr_(switch_instr) {}
Mark Mendell805b3b52015-09-18 14:10:29 -04008220
8221 void CreateJumpTable() {
8222 X86Assembler* assembler = codegen_->GetAssembler();
8223
8224 // Ensure that the reference to the jump table has the correct offset.
8225 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
8226 SetOffset(offset_in_constant_table);
8227
8228 // The label values in the jump table are computed relative to the
8229 // instruction addressing the constant area.
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008230 const int32_t relative_offset = codegen_->GetMethodAddressOffset(base_method_address_);
Mark Mendell805b3b52015-09-18 14:10:29 -04008231
8232    // Populate the jump table with the correct target offsets.
8233 int32_t num_entries = switch_instr_->GetNumEntries();
8234 HBasicBlock* block = switch_instr_->GetBlock();
8235 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
8236    // Each entry is the offset of the target block relative to the base method address.
8237 for (int32_t i = 0; i < num_entries; i++) {
8238 HBasicBlock* b = successors[i];
8239 Label* l = codegen_->GetLabelOf(b);
8240 DCHECK(l->IsBound());
8241 int32_t offset_to_block = l->Position() - relative_offset;
8242 assembler->AppendInt32(offset_to_block);
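      // At runtime the switch dispatch adds the base method address back to this entry, turning
      // it into the absolute address of the target block.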
8243 }
8244 }
8245
8246 private:
8247 const HX86PackedSwitch* switch_instr_;
8248};
8249
8250void CodeGeneratorX86::Finalize(CodeAllocator* allocator) {
8251 // Generate the constant area if needed.
8252 X86Assembler* assembler = GetAssembler();
jaishank20d1c942019-03-08 15:08:17 +05308253
Mark Mendell805b3b52015-09-18 14:10:29 -04008254 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
8255    // Align to a 4-byte boundary to reduce cache misses, as the data consists of
8256    // 4- and 8-byte values.
8257 assembler->Align(4, 0);
8258 constant_area_start_ = assembler->CodeSize();
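    // From here on the layout is: [method code][padding][constants added during code generation]
    // [jump tables appended below], all addressed relative to the base method address.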
8259
8260 // Populate any jump tables.
Vladimir Marko7d157fc2017-05-10 16:29:23 +01008261 for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
Mark Mendell805b3b52015-09-18 14:10:29 -04008262 jump_table->CreateJumpTable();
8263 }
8264
8265 // And now add the constant area to the generated code.
8266 assembler->AddConstantArea();
8267 }
8268
8269 // And finish up.
8270 CodeGenerator::Finalize(allocator);
8271}
8272
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008273Address CodeGeneratorX86::LiteralDoubleAddress(double v,
8274 HX86ComputeBaseMethodAddress* method_base,
8275 Register reg) {
8276 AssemblerFixup* fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008277 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddDouble(v));
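  // kDummy32BitOffset is only a placeholder; the RIPFixup above patches the real displacement
  // once the constant area has been laid out in Finalize(). The Float/Int32/Int64 helpers below
  // use the same scheme.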
Mark Mendell0616ae02015-04-17 12:49:27 -04008278 return Address(reg, kDummy32BitOffset, fixup);
8279}
8280
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008281Address CodeGeneratorX86::LiteralFloatAddress(float v,
8282 HX86ComputeBaseMethodAddress* method_base,
8283 Register reg) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008284 AssemblerFixup* fixup =
8285 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddFloat(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04008286 return Address(reg, kDummy32BitOffset, fixup);
8287}
8288
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008289Address CodeGeneratorX86::LiteralInt32Address(int32_t v,
8290 HX86ComputeBaseMethodAddress* method_base,
8291 Register reg) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008292 AssemblerFixup* fixup =
8293 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddInt32(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04008294 return Address(reg, kDummy32BitOffset, fixup);
8295}
8296
Nicolas Geoffray133719e2017-01-22 15:44:39 +00008297Address CodeGeneratorX86::LiteralInt64Address(int64_t v,
8298 HX86ComputeBaseMethodAddress* method_base,
8299 Register reg) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01008300 AssemblerFixup* fixup =
8301 new (GetGraph()->GetAllocator()) RIPFixup(*this, method_base, __ AddInt64(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04008302 return Address(reg, kDummy32BitOffset, fixup);
8303}
8304
Aart Bika19616e2016-02-01 18:57:58 -08008305void CodeGeneratorX86::Load32BitValue(Register dest, int32_t value) {
8306 if (value == 0) {
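    // Zeroing via xor is preferred over movl with 0: shorter encoding and no dependence on the
    // previous value of the register.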
8307 __ xorl(dest, dest);
8308 } else {
8309 __ movl(dest, Immediate(value));
8310 }
8311}
8312
8313void CodeGeneratorX86::Compare32BitValue(Register dest, int32_t value) {
8314 if (value == 0) {
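    // test reg, reg sets the same flags as cmp reg, 0 with a shorter encoding.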
8315 __ testl(dest, dest);
8316 } else {
8317 __ cmpl(dest, Immediate(value));
8318 }
8319}
8320
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008321void CodeGeneratorX86::GenerateIntCompare(Location lhs, Location rhs) {
8322 Register lhs_reg = lhs.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07008323 GenerateIntCompare(lhs_reg, rhs);
8324}
8325
8326void CodeGeneratorX86::GenerateIntCompare(Register lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008327 if (rhs.IsConstant()) {
8328 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07008329 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008330 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07008331 __ cmpl(lhs, Address(ESP, rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008332 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07008333 __ cmpl(lhs, rhs.AsRegister<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01008334 }
8335}
8336
8337Address CodeGeneratorX86::ArrayAddress(Register obj,
8338 Location index,
8339 ScaleFactor scale,
8340 uint32_t data_offset) {
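  // A constant index is folded straight into the displacement; otherwise emit a
  // (base, index, scale, displacement) operand so no extra address arithmetic is needed.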
8341 return index.IsConstant() ?
8342 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
8343 Address(obj, index.AsRegister<Register>(), scale, data_offset);
8344}
8345
Mark Mendell805b3b52015-09-18 14:10:29 -04008346Address CodeGeneratorX86::LiteralCaseTable(HX86PackedSwitch* switch_instr,
8347 Register reg,
8348 Register value) {
8349 // Create a fixup to be used to create and address the jump table.
8350 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01008351 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell805b3b52015-09-18 14:10:29 -04008352
8353 // We have to populate the jump tables.
8354  // Remember the fixup so that the jump table can be populated in Finalize().
8355
8356 // We want a scaled address, as we are extracting the correct offset from the table.
8357 return Address(reg, value, TIMES_4, kDummy32BitOffset, table_fixup);
8358}
8359
Andreas Gampe85b62f22015-09-09 13:15:38 -07008360// TODO: target as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008361void CodeGeneratorX86::MoveFromReturnRegister(Location target, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07008362 if (!target.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008363 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07008364 return;
8365 }
8366
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008367 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07008368
8369 Location return_loc = InvokeDexCallingConventionVisitorX86().GetReturnLocation(type);
8370 if (target.Equals(return_loc)) {
8371 return;
8372 }
8373
8374 // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
8375 // with the else branch.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008376 if (type == DataType::Type::kInt64) {
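    // On x86-32 a 64-bit value is returned in a register pair, so split the move into two 32-bit
    // moves and let the parallel move resolver schedule them without clobbering.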
Vladimir Markoca6fff82017-10-03 14:49:14 +01008377 HParallelMove parallel_move(GetGraph()->GetAllocator());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01008378 parallel_move.AddMove(return_loc.ToLow(), target.ToLow(), DataType::Type::kInt32, nullptr);
8379 parallel_move.AddMove(return_loc.ToHigh(), target.ToHigh(), DataType::Type::kInt32, nullptr);
Andreas Gampe85b62f22015-09-09 13:15:38 -07008380 GetMoveResolver()->EmitNativeCode(&parallel_move);
8381 } else {
8382 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01008383 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07008384 parallel_move.AddMove(return_loc, target, type, nullptr);
8385 GetMoveResolver()->EmitNativeCode(&parallel_move);
8386 }
8387}
8388
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00008389void CodeGeneratorX86::PatchJitRootUse(uint8_t* code,
8390 const uint8_t* roots_data,
8391 const PatchInfo<Label>& info,
8392 uint64_t index_in_table) const {
8393 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
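  // The bound label records a position at the end of the patched instruction, whose last bytes
  // hold the 32-bit literal; the adjustment steps back to that literal so the absolute address of
  // the root table entry can be stored there below.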
8394 uintptr_t address =
8395 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
Andreas Gampec55bb392018-09-21 00:02:02 +00008396 using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00008397 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
8398 dchecked_integral_cast<uint32_t>(address);
8399}
8400
Nicolas Geoffray132d8362016-11-16 09:19:42 +00008401void CodeGeneratorX86::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
8402 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008403 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01008404 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01008405 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00008406 }
8407
8408 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00008409 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01008410 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01008411 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00008412 }
8413}
8414
xueliang.zhonge0eb4832017-10-30 13:43:14 +00008415void LocationsBuilderX86::VisitIntermediateAddress(HIntermediateAddress* instruction
8416 ATTRIBUTE_UNUSED) {
8417 LOG(FATAL) << "Unreachable";
8418}
8419
8420void InstructionCodeGeneratorX86::VisitIntermediateAddress(HIntermediateAddress* instruction
8421 ATTRIBUTE_UNUSED) {
8422 LOG(FATAL) << "Unreachable";
8423}
8424
Roland Levillain4d027112015-07-01 15:41:14 +01008425#undef __
8426
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00008427} // namespace x86
8428} // namespace art