/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
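// Rough worked example of the trade-off above, using the estimates from the preceding comment:
// at the threshold of 7 entries, the compare/jump sequence costs roughly 1.5 * 7 + 3 ~= 14
// instructions, while the jump table costs 7 instructions plus 7 * 4 bytes of literals, so the
// table only starts paying off once num_entries reaches the threshold.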

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
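// Informal illustration of the gt_bias choices above, assuming the usual ARMv8 rule that an
// unordered FP compare sets NZCV to 0011: for kCondLT with gt_bias, `cc` (C clear) is not taken
// when the compare is unordered, so a NaN operand makes "less than" fail, i.e. NaN behaves as
// "greater"; without gt_bias, `lt` (N != V) is taken on an unordered compare, which is what the
// `/* unordered */` annotations above indicate.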

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
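// The layout implied by the helper above: the spilled core registers form one contiguous block
// at `spill_offset` from the base (normally the stack pointer), immediately followed by the
// spilled floating point registers at `spill_offset + core_spill_size`.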

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadStringSlowPathARM64(HLoadString* instruction, Register temp, vixl::aarch64::Label* adrp_label)
      : SlowPathCodeARM64(instruction),
        temp_(temp),
        adrp_label_(adrp_label) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // temp_ is a scratch register. Make sure it's not used for saving/restoring registers.
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    temps.Exclude(temp_);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    const DexFile& dex_file = instruction_->AsLoadString()->GetDexFile();
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // The string entry page address was preserved in temp_ thanks to kSaveEverything.
    } else {
      // For non-Baker read barrier, we need to re-calculate the address of the string entry page.
      adrp_label_ = arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index);
      arm64_codegen->EmitAdrpPlaceholder(adrp_label_, temp_);
    }
    vixl::aarch64::Label* strp_label =
        arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label_);
    {
      SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
      __ Bind(strp_label);
      __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
             MemOperand(temp_, /* offset placeholder */ 0));
    }

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  const Register temp_;
  vixl::aarch64::Label* adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};
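// Note on the adrp/str pair emitted above: both use placeholder immediates (via
// NewPcRelativeStringPatch, EmitAdrpPlaceholder and the "offset placeholder" operand), and the
// page address and page offset of the string's .bss entry are presumably filled in later, when
// the pc-relative patches recorded by the code generator are resolved.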

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location arg0, arg1;
    if (instruction_->IsInstanceOf()) {
      arg0 = locations->InAt(1);
      arg1 = locations->Out();
    } else {
      arg0 = locations->InAt(0);
      arg1 = locations->InAt(1);
    }

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(arg0,
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               arg1,
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Class*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated the jump table with right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
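// Each table entry emitted above is the signed 32-bit offset from `table_start_` to the target
// block's label. The dispatch sequence that consumes the table (emitted by the PackedSwitch
// visitor elsewhere in this file) is therefore expected to load the entry at
// `index * sizeof(int32_t)`, add it to the table address, and branch to the result (an
// inference from the offsets stored here).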
 | 598 |  | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 599 | // Slow path marking an object reference `ref` during a read | 
 | 600 | // barrier. The field `obj.field` in the object `obj` holding this | 
 | 601 | // reference does not get updated by this slow path after marking (see | 
 | 602 | // ReadBarrierMarkAndUpdateFieldSlowPathARM64 below for that). | 
 | 603 | // | 
 | 604 | // This means that after the execution of this slow path, `ref` will | 
 | 605 | // always be up-to-date, but `obj.field` may not; i.e., after the | 
 | 606 | // flip, `ref` will be a to-space reference, but `obj.field` will | 
 | 607 | // probably still be a from-space reference (unless it gets updated by | 
 | 608 | // another thread, or if another thread installed another object | 
 | 609 | // reference (different from `ref`) in `obj.field`). | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 610 | class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 { | 
 | 611 |  public: | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 612 |   ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location ref) | 
 | 613 |       : SlowPathCodeARM64(instruction), ref_(ref) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 614 |     DCHECK(kEmitCompilerReadBarrier); | 
 | 615 |   } | 
 | 616 |  | 
 | 617 |   const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; } | 
 | 618 |  | 
 | 619 |   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { | 
 | 620 |     LocationSummary* locations = instruction_->GetLocations(); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 621 |     DCHECK(locations->CanCall()); | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 622 |     DCHECK(ref_.IsRegister()) << ref_; | 
 | 623 |     DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg(); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 624 |     DCHECK(instruction_->IsInstanceFieldGet() || | 
 | 625 |            instruction_->IsStaticFieldGet() || | 
 | 626 |            instruction_->IsArrayGet() || | 
| Roland Levillain | 16d9f94 | 2016-08-25 17:27:56 +0100 | [diff] [blame] | 627 |            instruction_->IsArraySet() || | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 628 |            instruction_->IsLoadClass() || | 
 | 629 |            instruction_->IsLoadString() || | 
 | 630 |            instruction_->IsInstanceOf() || | 
| Roland Levillain | 3d31242 | 2016-06-23 13:53:42 +0100 | [diff] [blame] | 631 |            instruction_->IsCheckCast() || | 
| Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 632 |            (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) || | 
 | 633 |            (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified())) | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 634 |         << "Unexpected instruction in read barrier marking slow path: " | 
 | 635 |         << instruction_->DebugName(); | 
| Roland Levillain | 19c5419 | 2016-11-04 13:44:09 +0000 | [diff] [blame] | 636 |     // The read barrier instrumentation of object ArrayGet | 
 | 637 |     // instructions does not support the HIntermediateAddress | 
 | 638 |     // instruction. | 
 | 639 |     DCHECK(!(instruction_->IsArrayGet() && | 
 | 640 |              instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress())); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 641 |  | 
 | 642 |     __ Bind(GetEntryLabel()); | 
| Roland Levillain | 4359e61 | 2016-07-20 11:32:19 +0100 | [diff] [blame] | 643 |     // No need to save live registers; it's taken care of by the | 
 | 644 |     // entrypoint. Also, there is no need to update the stack mask, | 
 | 645 |     // as this runtime call will not trigger a garbage collection. | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 646 |     CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 647 |     DCHECK_NE(ref_.reg(), LR); | 
 | 648 |     DCHECK_NE(ref_.reg(), WSP); | 
 | 649 |     DCHECK_NE(ref_.reg(), WZR); | 
| Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 650 |     // IP0 is used internally by the ReadBarrierMarkRegX entry point | 
 | 651 |     // as a temporary, it cannot be the entry point's input/output. | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 652 |     DCHECK_NE(ref_.reg(), IP0); | 
 | 653 |     DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg(); | 
| Roland Levillain | 02b7580 | 2016-07-13 11:54:35 +0100 | [diff] [blame] | 654 |     // "Compact" slow path, saving two moves. | 
 | 655 |     // | 
 | 656 |     // Instead of using the standard runtime calling convention (input | 
 | 657 |     // and output in W0): | 
 | 658 |     // | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 659 |     //   W0 <- ref | 
| Roland Levillain | 02b7580 | 2016-07-13 11:54:35 +0100 | [diff] [blame] | 660 |     //   W0 <- ReadBarrierMark(W0) | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 661 |     //   ref <- W0 | 
| Roland Levillain | 02b7580 | 2016-07-13 11:54:35 +0100 | [diff] [blame] | 662 |     // | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 663 |     // we just use rX (the register containing `ref`) as input and output | 
| Roland Levillain | 02b7580 | 2016-07-13 11:54:35 +0100 | [diff] [blame] | 664 |     // of a dedicated entrypoint: | 
 | 665 |     // | 
 | 666 |     //   rX <- ReadBarrierMarkRegX(rX) | 
 | 667 |     // | 
 | 668 |     int32_t entry_point_offset = | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 669 |         CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg()); | 
| Roland Levillain | dec8f63 | 2016-07-22 17:10:06 +0100 | [diff] [blame] | 670 |     // This runtime call does not require a stack map. | 
 | 671 |     arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 672 |     __ B(GetExitLabel()); | 
 | 673 |   } | 
 | 674 |  | 
 | 675 |  private: | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 676 |   // The location (register) of the marked object reference. | 
 | 677 |   const Location ref_; | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 678 |  | 
 | 679 |   DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64); | 
 | 680 | }; | 
 | 681 |  | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 682 | // Slow path marking an object reference `ref` during a read barrier, | 
 | 683 | // and if needed, atomically updating the field `obj.field` in the | 
 | 684 | // object `obj` holding this reference after marking (contrary to | 
 | 685 | // ReadBarrierMarkSlowPathARM64 above, which never tries to update | 
 | 686 | // `obj.field`). | 
 | 687 | // | 
 | 688 | // This means that after the execution of this slow path, both `ref` | 
 | 689 | // and `obj.field` will be up-to-date; i.e., after the flip, both will | 
 | 690 | // hold the same to-space reference (unless another thread installed | 
 | 691 | // another object reference (different from `ref`) in `obj.field`). | 
 | 692 | class ReadBarrierMarkAndUpdateFieldSlowPathARM64 : public SlowPathCodeARM64 { | 
 | 693 |  public: | 
 | 694 |   ReadBarrierMarkAndUpdateFieldSlowPathARM64(HInstruction* instruction, | 
 | 695 |                                              Location ref, | 
 | 696 |                                              Register obj, | 
 | 697 |                                              Location field_offset, | 
 | 698 |                                              Register temp) | 
 | 699 |       : SlowPathCodeARM64(instruction), | 
 | 700 |         ref_(ref), | 
 | 701 |         obj_(obj), | 
 | 702 |         field_offset_(field_offset), | 
 | 703 |         temp_(temp) { | 
 | 704 |     DCHECK(kEmitCompilerReadBarrier); | 
 | 705 |   } | 
 | 706 |  | 
 | 707 |   const char* GetDescription() const OVERRIDE { | 
 | 708 |     return "ReadBarrierMarkAndUpdateFieldSlowPathARM64"; | 
 | 709 |   } | 
 | 710 |  | 
 | 711 |   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { | 
 | 712 |     LocationSummary* locations = instruction_->GetLocations(); | 
 | 713 |     Register ref_reg = WRegisterFrom(ref_); | 
 | 714 |     DCHECK(locations->CanCall()); | 
 | 715 |     DCHECK(ref_.IsRegister()) << ref_; | 
 | 716 |     DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg(); | 
 | 717 |     // This slow path is only used by the UnsafeCASObject intrinsic. | 
 | 718 |     DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified())) | 
 | 719 |         << "Unexpected instruction in read barrier marking and field updating slow path: " | 
 | 720 |         << instruction_->DebugName(); | 
 | 721 |     DCHECK(instruction_->GetLocations()->Intrinsified()); | 
 | 722 |     DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject); | 
 | 723 |     DCHECK(field_offset_.IsRegister()) << field_offset_; | 
 | 724 |  | 
 | 725 |     __ Bind(GetEntryLabel()); | 
 | 726 |  | 
 | 727 |     // Save the old reference. | 
 | 728 |     // Note that we cannot use IP to save the old reference, as IP is | 
 | 729 |     // used internally by the ReadBarrierMarkRegX entry point, and we | 
 | 730 |     // need the old reference after the call to that entry point. | 
 | 731 |     DCHECK_NE(LocationFrom(temp_).reg(), IP0); | 
 | 732 |     __ Mov(temp_.W(), ref_reg); | 
 | 733 |  | 
 | 734 |     // No need to save live registers; it's taken care of by the | 
 | 735 |     // entrypoint. Also, there is no need to update the stack mask, | 
 | 736 |     // as this runtime call will not trigger a garbage collection. | 
 | 737 |     CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); | 
 | 738 |     DCHECK_NE(ref_.reg(), LR); | 
 | 739 |     DCHECK_NE(ref_.reg(), WSP); | 
 | 740 |     DCHECK_NE(ref_.reg(), WZR); | 
 | 741 |     // IP0 is used internally by the ReadBarrierMarkRegX entry point | 
 | 742 |     // as a temporary, it cannot be the entry point's input/output. | 
 | 743 |     DCHECK_NE(ref_.reg(), IP0); | 
 | 744 |     DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg(); | 
 | 745 |     // "Compact" slow path, saving two moves. | 
 | 746 |     // | 
 | 747 |     // Instead of using the standard runtime calling convention (input | 
 | 748 |     // and output in W0): | 
 | 749 |     // | 
 | 750 |     //   W0 <- ref | 
 | 751 |     //   W0 <- ReadBarrierMark(W0) | 
 | 752 |     //   ref <- W0 | 
 | 753 |     // | 
 | 754 |     // we just use rX (the register containing `ref`) as input and output | 
 | 755 |     // of a dedicated entrypoint: | 
 | 756 |     // | 
 | 757 |     //   rX <- ReadBarrierMarkRegX(rX) | 
 | 758 |     // | 
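 |  |     // For instance, with `ref` in W3, the call below goes through the dedicated | 
 |  |     // entry point for register 3 and the marked reference comes back directly | 
 |  |     // in W3, with no argument shuffling into and out of W0. | 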
 | 759 |     int32_t entry_point_offset = | 
 | 760 |         CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg()); | 
 | 761 |     // This runtime call does not require a stack map. | 
 | 762 |     arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this); | 
 | 763 |  | 
 | 764 |     // If the new reference is different from the old reference, | 
 | 765 |     // update the field in the holder (`*(obj_ + field_offset_)`). | 
 | 766 |     // | 
 | 767 |     // Note that this field could also hold a different object, if | 
 | 768 |     // another thread had concurrently changed it. In that case, the | 
 | 769 |     // LDXR/CMP/BNE sequence of instructions in the compare-and-set | 
 | 770 |     // (CAS) operation below would abort the CAS, leaving the field | 
 | 771 |     // as-is. | 
 | 772 |     vixl::aarch64::Label done; | 
 | 773 |     __ Cmp(temp_.W(), ref_reg); | 
 | 774 |     __ B(eq, &done); | 
 | 775 |  | 
 | 776 |     // Update the holder's field atomically.  This may fail if the | 
 | 777 |     // mutator updates it before us, but it's OK.  This is achieved | 
 | 778 |     // using a strong compare-and-set (CAS) operation with relaxed | 
 | 779 |     // memory synchronization ordering, where the expected value is | 
 | 780 |     // the old reference and the desired value is the new reference. | 
 | 781 |  | 
 | 782 |     MacroAssembler* masm = arm64_codegen->GetVIXLAssembler(); | 
 | 783 |     UseScratchRegisterScope temps(masm); | 
 | 784 |  | 
 | 785 |     // Convenience aliases. | 
 | 786 |     Register base = obj_.W(); | 
 | 787 |     Register offset = XRegisterFrom(field_offset_); | 
 | 788 |     Register expected = temp_.W(); | 
 | 789 |     Register value = ref_reg; | 
 | 790 |     Register tmp_ptr = temps.AcquireX();    // Pointer to actual memory. | 
 | 791 |     Register tmp_value = temps.AcquireW();  // Value in memory. | 
 | 792 |  | 
 | 793 |     __ Add(tmp_ptr, base.X(), Operand(offset)); | 
 | 794 |  | 
 | 795 |     if (kPoisonHeapReferences) { | 
 | 796 |       arm64_codegen->GetAssembler()->PoisonHeapReference(expected); | 
 | 797 |       if (value.Is(expected)) { | 
 | 798 |         // Do not poison `value`, as it is the same register as | 
 | 799 |         // `expected`, which has just been poisoned. | 
 | 800 |       } else { | 
 | 801 |         arm64_codegen->GetAssembler()->PoisonHeapReference(value); | 
 | 802 |       } | 
 | 803 |     } | 
 | 804 |  | 
 | 805 |     // do { | 
 | 806 |     //   tmp_value = [tmp_ptr] - expected; | 
 | 807 |     // } while (tmp_value == 0 && failure([tmp_ptr] <- value)); | 
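 |  |     // The loop below implements this with an exclusive load/store pair: LDXR | 
 |  |     // loads the current field value, a failed comparison branches out (after | 
 |  |     // CLREX clears the exclusive monitor), and a failed STXR (non-zero status | 
 |  |     // written to `tmp_value`) retries from `loop_head`. | 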
 | 808 |  | 
| Roland Levillain | 24a4d11 | 2016-10-26 13:10:46 +0100 | [diff] [blame] | 809 |     vixl::aarch64::Label loop_head, comparison_failed, exit_loop; | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 810 |     __ Bind(&loop_head); | 
 | 811 |     __ Ldxr(tmp_value, MemOperand(tmp_ptr)); | 
 | 812 |     __ Cmp(tmp_value, expected); | 
| Roland Levillain | 24a4d11 | 2016-10-26 13:10:46 +0100 | [diff] [blame] | 813 |     __ B(&comparison_failed, ne); | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 814 |     __ Stxr(tmp_value, value, MemOperand(tmp_ptr)); | 
 | 815 |     __ Cbnz(tmp_value, &loop_head); | 
| Roland Levillain | 24a4d11 | 2016-10-26 13:10:46 +0100 | [diff] [blame] | 816 |     __ B(&exit_loop); | 
 | 817 |     __ Bind(&comparison_failed); | 
 | 818 |     __ Clrex(); | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 819 |     __ Bind(&exit_loop); | 
 | 820 |  | 
 | 821 |     if (kPoisonHeapReferences) { | 
 | 822 |       arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected); | 
 | 823 |       if (value.Is(expected)) { | 
 | 824 |         // Do not unpoison `value`, as it is the same register as | 
 | 825 |         // `expected`, which has just been unpoisoned. | 
 | 826 |       } else { | 
 | 827 |         arm64_codegen->GetAssembler()->UnpoisonHeapReference(value); | 
 | 828 |       } | 
 | 829 |     } | 
 | 830 |  | 
 | 831 |     __ Bind(&done); | 
 | 832 |     __ B(GetExitLabel()); | 
 | 833 |   } | 
 | 834 |  | 
 | 835 |  private: | 
 | 836 |   // The location (register) of the marked object reference. | 
 | 837 |   const Location ref_; | 
 | 838 |   // The register containing the object holding the marked object reference field. | 
 | 839 |   const Register obj_; | 
 | 840 |   // The location of the offset of the marked reference field within `obj_`. | 
 | 841 |   Location field_offset_; | 
 | 842 |  | 
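 |  |   // A temporary register used to hold the old reference value across the | 
 |  |   // call to the ReadBarrierMarkRegX entry point. | 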
 | 843 |   const Register temp_; | 
 | 844 |  | 
 | 845 |   DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathARM64); | 
 | 846 | }; | 
 | 847 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 848 | // Slow path generating a read barrier for a heap reference. | 
 | 849 | class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 { | 
 | 850 |  public: | 
 | 851 |   ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction, | 
 | 852 |                                            Location out, | 
 | 853 |                                            Location ref, | 
 | 854 |                                            Location obj, | 
 | 855 |                                            uint32_t offset, | 
 | 856 |                                            Location index) | 
| David Srbecky | 9cd6d37 | 2016-02-09 15:24:47 +0000 | [diff] [blame] | 857 |       : SlowPathCodeARM64(instruction), | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 858 |         out_(out), | 
 | 859 |         ref_(ref), | 
 | 860 |         obj_(obj), | 
 | 861 |         offset_(offset), | 
 | 862 |         index_(index) { | 
 | 863 |     DCHECK(kEmitCompilerReadBarrier); | 
 | 864 |     // If `obj` is equal to `out` or `ref`, it means the initial object | 
 | 865 |     // has been overwritten by (or after) the heap object reference load | 
 | 866 |     // to be instrumented, e.g.: | 
 | 867 |     // | 
 | 868 |     //   __ Ldr(out, HeapOperand(out, class_offset)); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 869 |     //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 870 |     // | 
 | 871 |     // In that case, we have lost the information about the original | 
 | 872 |     // object, and the emitted read barrier cannot work properly. | 
 | 873 |     DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out; | 
 | 874 |     DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref; | 
 | 875 |   } | 
 | 876 |  | 
 | 877 |   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { | 
 | 878 |     CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); | 
 | 879 |     LocationSummary* locations = instruction_->GetLocations(); | 
 | 880 |     Primitive::Type type = Primitive::kPrimNot; | 
 | 881 |     DCHECK(locations->CanCall()); | 
 | 882 |     DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg())); | 
| Roland Levillain | 3d31242 | 2016-06-23 13:53:42 +0100 | [diff] [blame] | 883 |     DCHECK(instruction_->IsInstanceFieldGet() || | 
 | 884 |            instruction_->IsStaticFieldGet() || | 
 | 885 |            instruction_->IsArrayGet() || | 
 | 886 |            instruction_->IsInstanceOf() || | 
 | 887 |            instruction_->IsCheckCast() || | 
| Roland Levillain | dec8f63 | 2016-07-22 17:10:06 +0100 | [diff] [blame] | 888 |            (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified())) | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 889 |         << "Unexpected instruction in read barrier for heap reference slow path: " | 
 | 890 |         << instruction_->DebugName(); | 
| Roland Levillain | 19c5419 | 2016-11-04 13:44:09 +0000 | [diff] [blame] | 891 |     // The read barrier instrumentation of object ArrayGet | 
 | 892 |     // instructions does not support the HIntermediateAddress | 
 | 893 |     // instruction. | 
| Roland Levillain | cd3d0fb | 2016-01-15 19:26:48 +0000 | [diff] [blame] | 894 |     DCHECK(!(instruction_->IsArrayGet() && | 
| Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 895 |              instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress())); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 896 |  | 
 | 897 |     __ Bind(GetEntryLabel()); | 
 | 898 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 899 |     SaveLiveRegisters(codegen, locations); | 
 | 900 |  | 
 | 901 |     // We may have to change the index's value, but as `index_` is a | 
 | 902 |     // constant member (like the other "inputs" of this slow path), | 
 | 903 |     // we introduce a copy of it, `index`. | 
 | 904 |     Location index = index_; | 
 | 905 |     if (index_.IsValid()) { | 
| Roland Levillain | 3d31242 | 2016-06-23 13:53:42 +0100 | [diff] [blame] | 906 |       // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 907 |       if (instruction_->IsArrayGet()) { | 
 | 908 |         // Compute the actual memory offset and store it in `index`. | 
 | 909 |         Register index_reg = RegisterFrom(index_, Primitive::kPrimInt); | 
 | 910 |         DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg())); | 
 | 911 |         if (codegen->IsCoreCalleeSaveRegister(index_.reg())) { | 
 | 912 |           // We are about to change the value of `index_reg` (see the | 
 | 913 |           // calls to vixl::MacroAssembler::Lsl and | 
 | 914 |           // vixl::MacroAssembler::Mov below), but it has | 
 | 915 |           // not been saved by the previous call to | 
 | 916 |           // art::SlowPathCode::SaveLiveRegisters, as it is a | 
 | 917 |           // callee-save register -- | 
 | 918 |           // art::SlowPathCode::SaveLiveRegisters does not consider | 
 | 919 |           // callee-save registers, as it has been designed with the | 
 | 920 |           // assumption that callee-save registers are supposed to be | 
 | 921 |           // handled by the called function.  So, as a callee-save | 
 | 922 |           // register, `index_reg` _would_ eventually be saved onto | 
 | 923 |           // the stack, but it would be too late: we would have | 
 | 924 |           // changed its value earlier.  Therefore, we manually save | 
 | 925 |           // it here into another freely available register, | 
 | 926 |           // `free_reg`, chosen of course among the caller-save | 
 | 927 |           // registers (as a callee-save `free_reg` register would | 
 | 928 |           // exhibit the same problem). | 
 | 929 |           // | 
 | 930 |           // Note we could have requested a temporary register from | 
 | 931 |           // the register allocator instead; but we prefer not to, as | 
 | 932 |           // this is a slow path, and we know we can find a | 
 | 933 |           // caller-save register that is available. | 
 | 934 |           Register free_reg = FindAvailableCallerSaveRegister(codegen); | 
 | 935 |           __ Mov(free_reg.W(), index_reg); | 
 | 936 |           index_reg = free_reg; | 
 | 937 |           index = LocationFrom(index_reg); | 
 | 938 |         } else { | 
 | 939 |           // The initial register stored in `index_` has already been | 
 | 940 |           // saved in the call to art::SlowPathCode::SaveLiveRegisters | 
 | 941 |           // (as it is not a callee-save register), so we can freely | 
 | 942 |           // use it. | 
 | 943 |         } | 
 | 944 |         // Shifting the index value contained in `index_reg` by the scale | 
 | 945 |         // factor (2) cannot overflow in practice, as the runtime is | 
 | 946 |         // unable to allocate object arrays with a size larger than | 
 | 947 |         // 2^26 - 1 (that is, 2^28 - 4 bytes). | 
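 |  |         // (References are 4 bytes wide, hence a scale shift of 2: the largest | 
 |  |         // shifted index is just under 2^28, so it still fits easily in the | 
 |  |         // 32-bit `index_reg` even after `offset_` is added below.) | 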
 | 948 |         __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type)); | 
 | 949 |         static_assert( | 
 | 950 |             sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t), | 
 | 951 |             "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes."); | 
 | 952 |         __ Add(index_reg, index_reg, Operand(offset_)); | 
 | 953 |       } else { | 
| Roland Levillain | 3d31242 | 2016-06-23 13:53:42 +0100 | [diff] [blame] | 954 |         // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile | 
 | 955 |         // intrinsics, `index_` is not shifted by a scale factor of 2 | 
 | 956 |         // (as in the case of ArrayGet), as it is actually an offset | 
 | 957 |         // to an object field within an object. | 
 | 958 |         DCHECK(instruction_->IsInvoke()) << instruction_->DebugName(); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 959 |         DCHECK(instruction_->GetLocations()->Intrinsified()); | 
 | 960 |         DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) || | 
 | 961 |                (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)) | 
 | 962 |             << instruction_->AsInvoke()->GetIntrinsic(); | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 963 |         DCHECK_EQ(offset_, 0u); | 
| Roland Levillain | a7426c6 | 2016-08-03 15:02:10 +0100 | [diff] [blame] | 964 |         DCHECK(index_.IsRegister()); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 965 |       } | 
 | 966 |     } | 
 | 967 |  | 
 | 968 |     // We're moving two or three locations to locations that could | 
 | 969 |     // overlap, so we need a parallel move resolver. | 
 | 970 |     InvokeRuntimeCallingConvention calling_convention; | 
 | 971 |     HParallelMove parallel_move(codegen->GetGraph()->GetArena()); | 
 | 972 |     parallel_move.AddMove(ref_, | 
 | 973 |                           LocationFrom(calling_convention.GetRegisterAt(0)), | 
 | 974 |                           type, | 
 | 975 |                           nullptr); | 
 | 976 |     parallel_move.AddMove(obj_, | 
 | 977 |                           LocationFrom(calling_convention.GetRegisterAt(1)), | 
 | 978 |                           type, | 
 | 979 |                           nullptr); | 
 | 980 |     if (index.IsValid()) { | 
 | 981 |       parallel_move.AddMove(index, | 
 | 982 |                             LocationFrom(calling_convention.GetRegisterAt(2)), | 
 | 983 |                             Primitive::kPrimInt, | 
 | 984 |                             nullptr); | 
 | 985 |       codegen->GetMoveResolver()->EmitNativeCode(¶llel_move); | 
 | 986 |     } else { | 
 | 987 |       codegen->GetMoveResolver()->EmitNativeCode(¶llel_move); | 
 | 988 |       arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_); | 
 | 989 |     } | 
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 990 |     arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow, | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 991 |                                  instruction_, | 
 | 992 |                                  instruction_->GetDexPc(), | 
 | 993 |                                  this); | 
 | 994 |     CheckEntrypointTypes< | 
 | 995 |         kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>(); | 
 | 996 |     arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type); | 
 | 997 |  | 
 | 998 |     RestoreLiveRegisters(codegen, locations); | 
 | 999 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1000 |     __ B(GetExitLabel()); | 
 | 1001 |   } | 
 | 1002 |  | 
 | 1003 |   const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; } | 
 | 1004 |  | 
 | 1005 |  private: | 
 | 1006 |   Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1007 |     size_t ref = static_cast<size_t>(XRegisterFrom(ref_).GetCode()); | 
 | 1008 |     size_t obj = static_cast<size_t>(XRegisterFrom(obj_).GetCode()); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1009 |     for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) { | 
 | 1010 |       if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) { | 
 | 1011 |         return Register(VIXLRegCodeFromART(i), kXRegSize); | 
 | 1012 |       } | 
 | 1013 |     } | 
 | 1014 |     // We shall never fail to find a free caller-save register, as | 
 | 1015 |     // there are more than two core caller-save registers on ARM64 | 
 | 1016 |     // (meaning it is possible to find one which is different from | 
 | 1017 |     // `ref` and `obj`). | 
 | 1018 |     DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u); | 
 | 1019 |     LOG(FATAL) << "Could not find a free register"; | 
 | 1020 |     UNREACHABLE(); | 
 | 1021 |   } | 
 | 1022 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1023 |   const Location out_; | 
 | 1024 |   const Location ref_; | 
 | 1025 |   const Location obj_; | 
 | 1026 |   const uint32_t offset_; | 
 | 1027 |   // An additional location containing an index to an array. | 
 | 1028 |   // Only used for HArrayGet and the UnsafeGetObject & | 
 | 1029 |   // UnsafeGetObjectVolatile intrinsics. | 
 | 1030 |   const Location index_; | 
 | 1031 |  | 
 | 1032 |   DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64); | 
 | 1033 | }; | 
 | 1034 |  | 
 | 1035 | // Slow path generating a read barrier for a GC root. | 
 | 1036 | class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 { | 
 | 1037 |  public: | 
 | 1038 |   ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root) | 
| David Srbecky | 9cd6d37 | 2016-02-09 15:24:47 +0000 | [diff] [blame] | 1039 |       : SlowPathCodeARM64(instruction), out_(out), root_(root) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1040 |     DCHECK(kEmitCompilerReadBarrier); | 
 | 1041 |   } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1042 |  | 
 | 1043 |   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { | 
 | 1044 |     LocationSummary* locations = instruction_->GetLocations(); | 
 | 1045 |     Primitive::Type type = Primitive::kPrimNot; | 
 | 1046 |     DCHECK(locations->CanCall()); | 
 | 1047 |     DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg())); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1048 |     DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString()) | 
 | 1049 |         << "Unexpected instruction in read barrier for GC root slow path: " | 
 | 1050 |         << instruction_->DebugName(); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1051 |  | 
 | 1052 |     __ Bind(GetEntryLabel()); | 
 | 1053 |     SaveLiveRegisters(codegen, locations); | 
 | 1054 |  | 
 | 1055 |     InvokeRuntimeCallingConvention calling_convention; | 
 | 1056 |     CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); | 
 | 1057 |     // The argument of ReadBarrierForRootSlow is not a managed | 
 | 1058 |     // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`; | 
 | 1059 |     // thus we need a 64-bit move here, and we cannot use | 
 | 1060 |     // | 
 | 1061 |     //   arm64_codegen->MoveLocation( | 
 | 1062 |     //       LocationFrom(calling_convention.GetRegisterAt(0)), | 
 | 1063 |     //       root_, | 
 | 1064 |     //       type); | 
 | 1065 |     // | 
 | 1066 |     // which would emit a 32-bit move, as `type` is a (32-bit wide) | 
 | 1067 |     // reference type (`Primitive::kPrimNot`). | 
 | 1068 |     __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_)); | 
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 1069 |     arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow, | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1070 |                                  instruction_, | 
 | 1071 |                                  instruction_->GetDexPc(), | 
 | 1072 |                                  this); | 
 | 1073 |     CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>(); | 
 | 1074 |     arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type); | 
 | 1075 |  | 
 | 1076 |     RestoreLiveRegisters(codegen, locations); | 
 | 1077 |     __ B(GetExitLabel()); | 
 | 1078 |   } | 
 | 1079 |  | 
 | 1080 |   const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; } | 
 | 1081 |  | 
 | 1082 |  private: | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1083 |   const Location out_; | 
 | 1084 |   const Location root_; | 
 | 1085 |  | 
 | 1086 |   DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64); | 
 | 1087 | }; | 
 | 1088 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1089 | #undef __ | 
 | 1090 |  | 
| Roland Levillain | 2d27c8e | 2015-04-28 15:48:45 +0100 | [diff] [blame] | 1091 | Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1092 |   Location next_location; | 
 | 1093 |   if (type == Primitive::kPrimVoid) { | 
 | 1094 |     LOG(FATAL) << "Unreachable type " << type; | 
 | 1095 |   } | 
 | 1096 |  | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1097 |   if (Primitive::IsFloatingPointType(type) && | 
| Roland Levillain | 2d27c8e | 2015-04-28 15:48:45 +0100 | [diff] [blame] | 1098 |       (float_index_ < calling_convention.GetNumberOfFpuRegisters())) { | 
 | 1099 |     next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++)); | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1100 |   } else if (!Primitive::IsFloatingPointType(type) && | 
 | 1101 |              (gp_index_ < calling_convention.GetNumberOfRegisters())) { | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1102 |     next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++)); | 
 | 1103 |   } else { | 
 | 1104 |     size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_); | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1105 |     next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset) | 
 | 1106 |                                                  : Location::StackSlot(stack_offset); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1107 |   } | 
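 |  |   // For example, for an (int, float, long) signature the int and the long get | 
 |  |   // the next two free core parameter registers and the float gets the first | 
 |  |   // free FP parameter register; only when the corresponding register pool is | 
 |  |   // exhausted does an argument fall back to a (double) stack slot. | 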
 | 1108 |  | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1109 |   // Space on the stack is reserved for all arguments. | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1110 |   stack_index_ += Primitive::Is64BitType(type) ? 2 : 1; | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1111 |   return next_location; | 
 | 1112 | } | 
 | 1113 |  | 
| Nicolas Geoffray | fd88f16 | 2015-06-03 11:23:52 +0100 | [diff] [blame] | 1114 | Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const { | 
| Nicolas Geoffray | 38207af | 2015-06-01 15:46:22 +0100 | [diff] [blame] | 1115 |   return LocationFrom(kArtMethodRegister); | 
| Nicolas Geoffray | fd88f16 | 2015-06-03 11:23:52 +0100 | [diff] [blame] | 1116 | } | 
 | 1117 |  | 
| Serban Constantinescu | 579885a | 2015-02-22 20:51:33 +0000 | [diff] [blame] | 1118 | CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph, | 
 | 1119 |                                        const Arm64InstructionSetFeatures& isa_features, | 
| Serban Constantinescu | ecc4366 | 2015-08-13 13:33:12 +0100 | [diff] [blame] | 1120 |                                        const CompilerOptions& compiler_options, | 
 | 1121 |                                        OptimizingCompilerStats* stats) | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1122 |     : CodeGenerator(graph, | 
 | 1123 |                     kNumberOfAllocatableRegisters, | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1124 |                     kNumberOfAllocatableFPRegisters, | 
| Calin Juravle | cd6dffe | 2015-01-08 17:35:35 +0000 | [diff] [blame] | 1125 |                     kNumberOfAllocatableRegisterPairs, | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1126 |                     callee_saved_core_registers.GetList(), | 
 | 1127 |                     callee_saved_fp_registers.GetList(), | 
| Serban Constantinescu | ecc4366 | 2015-08-13 13:33:12 +0100 | [diff] [blame] | 1128 |                     compiler_options, | 
 | 1129 |                     stats), | 
| Alexandre Rames | c01a664 | 2016-04-15 11:54:06 +0100 | [diff] [blame] | 1130 |       block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 1131 |       jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1132 |       location_builder_(graph, this), | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1133 |       instruction_visitor_(graph, this), | 
| Serban Constantinescu | 579885a | 2015-02-22 20:51:33 +0000 | [diff] [blame] | 1134 |       move_resolver_(graph->GetArena(), this), | 
| Vladimir Marko | 93205e3 | 2016-04-13 11:59:46 +0100 | [diff] [blame] | 1135 |       assembler_(graph->GetArena()), | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 1136 |       isa_features_(isa_features), | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 1137 |       uint32_literals_(std::less<uint32_t>(), | 
 | 1138 |                        graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
| Vladimir Marko | 5233f93 | 2015-09-29 19:01:15 +0100 | [diff] [blame] | 1139 |       uint64_literals_(std::less<uint64_t>(), | 
 | 1140 |                        graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
 | 1141 |       method_patches_(MethodReferenceComparator(), | 
 | 1142 |                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
 | 1143 |       call_patches_(MethodReferenceComparator(), | 
 | 1144 |                     graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
 | 1145 |       relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 1146 |       pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
 | 1147 |       boot_image_string_patches_(StringReferenceValueComparator(), | 
 | 1148 |                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
 | 1149 |       pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 1150 |       boot_image_type_patches_(TypeReferenceValueComparator(), | 
 | 1151 |                                graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
 | 1152 |       pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 1153 |       boot_image_address_patches_(std::less<uint32_t>(), | 
| Nicolas Geoffray | 997d121 | 2016-11-09 10:36:29 +0000 | [diff] [blame] | 1154 |                                   graph->GetArena()->Adapter(kArenaAllocCodeGenerator)), | 
 | 1155 |       jit_string_patches_(StringReferenceValueComparator(), | 
 | 1156 |                           graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) { | 
| Nicolas Geoffray | d97dc40 | 2015-01-22 13:50:01 +0000 | [diff] [blame] | 1157 |   // Save the link register (containing the return address) to mimic Quick. | 
| Serban Constantinescu | 3d087de | 2015-01-28 11:57:05 +0000 | [diff] [blame] | 1158 |   AddAllocatedRegister(LocationFrom(lr)); | 
| Nicolas Geoffray | d97dc40 | 2015-01-22 13:50:01 +0000 | [diff] [blame] | 1159 | } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1160 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1161 | #define __ GetVIXLAssembler()-> | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1162 |  | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 1163 | void CodeGeneratorARM64::EmitJumpTables() { | 
| Alexandre Rames | c01a664 | 2016-04-15 11:54:06 +0100 | [diff] [blame] | 1164 |   for (auto&& jump_table : jump_tables_) { | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 1165 |     jump_table->EmitTable(this); | 
 | 1166 |   } | 
 | 1167 | } | 
 | 1168 |  | 
| Serban Constantinescu | 32f5b4d | 2014-11-25 20:05:46 +0000 | [diff] [blame] | 1169 | void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) { | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 1170 |   EmitJumpTables(); | 
| Serban Constantinescu | 32f5b4d | 2014-11-25 20:05:46 +0000 | [diff] [blame] | 1171 |   // Ensure we emit the literal pool. | 
 | 1172 |   __ FinalizeCode(); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 1173 |  | 
| Serban Constantinescu | 32f5b4d | 2014-11-25 20:05:46 +0000 | [diff] [blame] | 1174 |   CodeGenerator::Finalize(allocator); | 
 | 1175 | } | 
 | 1176 |  | 
| Zheng Xu | ad4450e | 2015-04-17 18:48:56 +0800 | [diff] [blame] | 1177 | void ParallelMoveResolverARM64::PrepareForEmitNativeCode() { | 
 | 1178 |   // Note: There are 6 kinds of moves: | 
 | 1179 |   // 1. constant -> GPR/FPR (non-cycle) | 
 | 1180 |   // 2. constant -> stack (non-cycle) | 
 | 1181 |   // 3. GPR/FPR -> GPR/FPR | 
 | 1182 |   // 4. GPR/FPR -> stack | 
 | 1183 |   // 5. stack -> GPR/FPR | 
 | 1184 |   // 6. stack -> stack (non-cycle) | 
 | 1185 |   // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4 | 
 | 1186 |   // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no | 
 | 1187 |   // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to | 
 | 1188 |   // resolve the dependency. | 
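 |  |   // Consequently, AllocateScratchLocationFor below never needs more than one | 
 |  |   // X (or D) scratch per cycle, taken from these VIXL temp pools. | 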
 | 1189 |   vixl_temps_.Open(GetVIXLAssembler()); | 
 | 1190 | } | 
 | 1191 |  | 
 | 1192 | void ParallelMoveResolverARM64::FinishEmitNativeCode() { | 
 | 1193 |   vixl_temps_.Close(); | 
 | 1194 | } | 
 | 1195 |  | 
 | 1196 | Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) { | 
 | 1197 |   DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister || | 
 | 1198 |          kind == Location::kStackSlot || kind == Location::kDoubleStackSlot); | 
 | 1199 |   kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister; | 
 | 1200 |   Location scratch = GetScratchLocation(kind); | 
 | 1201 |   if (!scratch.Equals(Location::NoLocation())) { | 
 | 1202 |     return scratch; | 
 | 1203 |   } | 
 | 1204 |   // Allocate from VIXL temp registers. | 
 | 1205 |   if (kind == Location::kRegister) { | 
 | 1206 |     scratch = LocationFrom(vixl_temps_.AcquireX()); | 
 | 1207 |   } else { | 
 | 1208 |     DCHECK(kind == Location::kFpuRegister); | 
 | 1209 |     scratch = LocationFrom(vixl_temps_.AcquireD()); | 
 | 1210 |   } | 
 | 1211 |   AddScratchLocation(scratch); | 
 | 1212 |   return scratch; | 
 | 1213 | } | 
 | 1214 |  | 
 | 1215 | void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) { | 
 | 1216 |   if (loc.IsRegister()) { | 
 | 1217 |     vixl_temps_.Release(XRegisterFrom(loc)); | 
 | 1218 |   } else { | 
 | 1219 |     DCHECK(loc.IsFpuRegister()); | 
 | 1220 |     vixl_temps_.Release(DRegisterFrom(loc)); | 
 | 1221 |   } | 
 | 1222 |   RemoveScratchLocation(loc); | 
 | 1223 | } | 
 | 1224 |  | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1225 | void ParallelMoveResolverARM64::EmitMove(size_t index) { | 
| Vladimir Marko | 225b646 | 2015-09-28 12:17:40 +0100 | [diff] [blame] | 1226 |   MoveOperands* move = moves_[index]; | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1227 |   codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1228 | } | 
 | 1229 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1230 | void CodeGeneratorARM64::GenerateFrameEntry() { | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 1231 |   MacroAssembler* masm = GetVIXLAssembler(); | 
 | 1232 |   BlockPoolsScope block_pools(masm); | 
| Nicolas Geoffray | 1cf9528 | 2014-12-12 19:22:03 +0000 | [diff] [blame] | 1233 |   __ Bind(&frame_entry_label_); | 
 | 1234 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 1235 |   bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod(); | 
 | 1236 |   if (do_overflow_check) { | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 1237 |     UseScratchRegisterScope temps(masm); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 1238 |     Register temp = temps.AcquireX(); | 
| Nicolas Geoffray | d97dc40 | 2015-01-22 13:50:01 +0000 | [diff] [blame] | 1239 |     DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks()); | 
| Serban Constantinescu | 3d087de | 2015-01-28 11:57:05 +0000 | [diff] [blame] | 1240 |     __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64))); | 
| Nicolas Geoffray | d97dc40 | 2015-01-22 13:50:01 +0000 | [diff] [blame] | 1241 |     __ Ldr(wzr, MemOperand(temp, 0)); | 
 | 1242 |     RecordPcInfo(nullptr, 0); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 1243 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1244 |  | 
| Nicolas Geoffray | c0572a4 | 2015-02-06 14:35:25 +0000 | [diff] [blame] | 1245 |   if (!HasEmptyFrame()) { | 
 | 1246 |     int frame_size = GetFrameSize(); | 
 | 1247 |     // Stack layout: | 
 | 1248 |     //      sp[frame_size - 8]        : lr. | 
 | 1249 |     //      ...                       : other preserved core registers. | 
 | 1250 |     //      ...                       : other preserved fp registers. | 
 | 1251 |     //      ...                       : reserved frame space. | 
 | 1252 |     //      sp[0]                     : current method. | 
| Nicolas Geoffray | 96eeb4e | 2016-10-12 22:03:31 +0100 | [diff] [blame] | 1253 |  | 
 | 1254 |     // Save the current method if we need it. Note that we do not | 
 | 1255 |     // do this in HCurrentMethod, as the instruction might have been removed | 
 | 1256 |     // in the SSA graph. | 
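 |  |     // The pre-indexed store below both allocates the whole frame (sp -= frame_size) | 
 |  |     // and stores the current method at the new sp[0]; otherwise Claim just adjusts sp. | 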
 | 1257 |     if (RequiresCurrentMethod()) { | 
 | 1258 |       __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex)); | 
| Nicolas Geoffray | 9989b16 | 2016-10-13 13:42:30 +0100 | [diff] [blame] | 1259 |     } else { | 
 | 1260 |       __ Claim(frame_size); | 
| Nicolas Geoffray | 96eeb4e | 2016-10-12 22:03:31 +0100 | [diff] [blame] | 1261 |     } | 
| David Srbecky | c6b4dd8 | 2015-04-07 20:32:43 +0100 | [diff] [blame] | 1262 |     GetAssembler()->cfi().AdjustCFAOffset(frame_size); | 
| Zheng Xu | 69a5030 | 2015-04-14 20:04:41 +0800 | [diff] [blame] | 1263 |     GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(), | 
 | 1264 |         frame_size - GetCoreSpillSize()); | 
 | 1265 |     GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(), | 
 | 1266 |         frame_size - FrameEntrySpillSize()); | 
| Nicolas Geoffray | c0572a4 | 2015-02-06 14:35:25 +0000 | [diff] [blame] | 1267 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1268 | } | 
 | 1269 |  | 
 | 1270 | void CodeGeneratorARM64::GenerateFrameExit() { | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 1271 |   BlockPoolsScope block_pools(GetVIXLAssembler()); | 
| David Srbecky | c34dc93 | 2015-04-12 09:27:43 +0100 | [diff] [blame] | 1272 |   GetAssembler()->cfi().RememberState(); | 
| Nicolas Geoffray | c0572a4 | 2015-02-06 14:35:25 +0000 | [diff] [blame] | 1273 |   if (!HasEmptyFrame()) { | 
 | 1274 |     int frame_size = GetFrameSize(); | 
| Zheng Xu | 69a5030 | 2015-04-14 20:04:41 +0800 | [diff] [blame] | 1275 |     GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(), | 
 | 1276 |         frame_size - FrameEntrySpillSize()); | 
 | 1277 |     GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(), | 
 | 1278 |         frame_size - GetCoreSpillSize()); | 
| Nicolas Geoffray | c0572a4 | 2015-02-06 14:35:25 +0000 | [diff] [blame] | 1279 |     __ Drop(frame_size); | 
| David Srbecky | c6b4dd8 | 2015-04-07 20:32:43 +0100 | [diff] [blame] | 1280 |     GetAssembler()->cfi().AdjustCFAOffset(-frame_size); | 
| Nicolas Geoffray | c0572a4 | 2015-02-06 14:35:25 +0000 | [diff] [blame] | 1281 |   } | 
| David Srbecky | c34dc93 | 2015-04-12 09:27:43 +0100 | [diff] [blame] | 1282 |   __ Ret(); | 
 | 1283 |   GetAssembler()->cfi().RestoreState(); | 
 | 1284 |   GetAssembler()->cfi().DefCFAOffset(GetFrameSize()); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1285 | } | 
 | 1286 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1287 | CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const { | 
| Zheng Xu | da40309 | 2015-04-24 17:35:39 +0800 | [diff] [blame] | 1288 |   DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0)); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1289 |   return CPURegList(CPURegister::kRegister, kXRegSize, | 
 | 1290 |                     core_spill_mask_); | 
| Zheng Xu | da40309 | 2015-04-24 17:35:39 +0800 | [diff] [blame] | 1291 | } | 
 | 1292 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1293 | CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const { | 
| Zheng Xu | da40309 | 2015-04-24 17:35:39 +0800 | [diff] [blame] | 1294 |   DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_, | 
 | 1295 |                                          GetNumberOfFloatingPointRegisters())); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1296 |   return CPURegList(CPURegister::kFPRegister, kDRegSize, | 
 | 1297 |                     fpu_spill_mask_); | 
| Zheng Xu | da40309 | 2015-04-24 17:35:39 +0800 | [diff] [blame] | 1298 | } | 
 | 1299 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1300 | void CodeGeneratorARM64::Bind(HBasicBlock* block) { | 
 | 1301 |   __ Bind(GetLabelOf(block)); | 
 | 1302 | } | 
 | 1303 |  | 
| Calin Juravle | 175dc73 | 2015-08-25 15:42:32 +0100 | [diff] [blame] | 1304 | void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) { | 
 | 1305 |   DCHECK(location.IsRegister()); | 
 | 1306 |   __ Mov(RegisterFrom(location, Primitive::kPrimInt), value); | 
 | 1307 | } | 
 | 1308 |  | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1309 | void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) { | 
 | 1310 |   if (location.IsRegister()) { | 
 | 1311 |     locations->AddTemp(location); | 
 | 1312 |   } else { | 
 | 1313 |     UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location; | 
 | 1314 |   } | 
 | 1315 | } | 
 | 1316 |  | 
| Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 1317 | void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1318 |   UseScratchRegisterScope temps(GetVIXLAssembler()); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1319 |   Register card = temps.AcquireX(); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 1320 |   Register temp = temps.AcquireW();   // Index within the CardTable - 32bit. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1321 |   vixl::aarch64::Label done; | 
| Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 1322 |   if (value_can_be_null) { | 
 | 1323 |     __ Cbz(value, &done); | 
 | 1324 |   } | 
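 |  |   // The card table base is read from a thread-local slot; the object address | 
 |  |   // shifted right by kCardShift indexes into it, and the value stored is the | 
 |  |   // low byte of the (biased) base address itself, which the runtime arranges | 
 |  |   // to equal the dirty-card value. | 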
| Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 1325 |   __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value())); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1326 |   __ Lsr(temp, object, gc::accounting::CardTable::kCardShift); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 1327 |   __ Strb(card, MemOperand(card, temp.X())); | 
| Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 1328 |   if (value_can_be_null) { | 
 | 1329 |     __ Bind(&done); | 
 | 1330 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1331 | } | 
 | 1332 |  | 
| David Brazdil | 58282f4 | 2016-01-14 12:45:10 +0000 | [diff] [blame] | 1333 | void CodeGeneratorARM64::SetupBlockedRegisters() const { | 
| Serban Constantinescu | 3d087de | 2015-01-28 11:57:05 +0000 | [diff] [blame] | 1334 |   // Blocked core registers: | 
 | 1335 |   //      lr        : Runtime reserved. | 
 | 1336 |   //      tr        : Runtime reserved. | 
 | 1337 |   //      xSuspend  : Runtime reserved. TODO: Unblock this when the runtime stops using it. | 
 | 1338 |   //      ip1       : VIXL core temp. | 
 | 1339 |   //      ip0       : VIXL core temp. | 
 | 1340 |   // | 
 | 1341 |   // Blocked fp registers: | 
 | 1342 |   //      d31       : VIXL fp temp. | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1343 |   CPURegList reserved_core_registers = vixl_reserved_core_registers; | 
 | 1344 |   reserved_core_registers.Combine(runtime_reserved_core_registers); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1345 |   while (!reserved_core_registers.IsEmpty()) { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1346 |     blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true; | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1347 |   } | 
| Serban Constantinescu | 3d087de | 2015-01-28 11:57:05 +0000 | [diff] [blame] | 1348 |  | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1349 |   CPURegList reserved_fp_registers = vixl_reserved_fp_registers; | 
| Zheng Xu | a3ec394 | 2015-02-15 18:39:46 +0800 | [diff] [blame] | 1350 |   while (!reserved_fp_registers.IsEmpty()) { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1351 |     blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true; | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1352 |   } | 
| Serban Constantinescu | 3d087de | 2015-01-28 11:57:05 +0000 | [diff] [blame] | 1353 |  | 
| David Brazdil | 58282f4 | 2016-01-14 12:45:10 +0000 | [diff] [blame] | 1354 |   if (GetGraph()->IsDebuggable()) { | 
| Nicolas Geoffray | ecf680d | 2015-10-05 11:15:37 +0100 | [diff] [blame] | 1355 |     // Stubs do not save callee-save floating point registers. If the graph | 
 | 1356 |     // is debuggable, we need to deal with these registers differently. For | 
 | 1357 |     // now, just block them. | 
| David Brazdil | 58282f4 | 2016-01-14 12:45:10 +0000 | [diff] [blame] | 1358 |     CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers; | 
 | 1359 |     while (!reserved_fp_registers_debuggable.IsEmpty()) { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1360 |       blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true; | 
| Serban Constantinescu | 3d087de | 2015-01-28 11:57:05 +0000 | [diff] [blame] | 1361 |     } | 
 | 1362 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1363 | } | 
 | 1364 |  | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1365 | size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) { | 
 | 1366 |   Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize); | 
 | 1367 |   __ Str(reg, MemOperand(sp, stack_index)); | 
 | 1368 |   return kArm64WordSize; | 
 | 1369 | } | 
 | 1370 |  | 
 | 1371 | size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) { | 
 | 1372 |   Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize); | 
 | 1373 |   __ Ldr(reg, MemOperand(sp, stack_index)); | 
 | 1374 |   return kArm64WordSize; | 
 | 1375 | } | 
 | 1376 |  | 
 | 1377 | size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) { | 
 | 1378 |   FPRegister reg = FPRegister(reg_id, kDRegSize); | 
 | 1379 |   __ Str(reg, MemOperand(sp, stack_index)); | 
 | 1380 |   return kArm64WordSize; | 
 | 1381 | } | 
 | 1382 |  | 
 | 1383 | size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) { | 
 | 1384 |   FPRegister reg = FPRegister(reg_id, kDRegSize); | 
 | 1385 |   __ Ldr(reg, MemOperand(sp, stack_index)); | 
 | 1386 |   return kArm64WordSize; | 
 | 1387 | } | 
 | 1388 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1389 | void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const { | 
| David Brazdil | c7465286 | 2015-05-13 17:50:09 +0100 | [diff] [blame] | 1390 |   stream << XRegister(reg); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1391 | } | 
 | 1392 |  | 
 | 1393 | void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const { | 
| David Brazdil | c7465286 | 2015-05-13 17:50:09 +0100 | [diff] [blame] | 1394 |   stream << DRegister(reg); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1395 | } | 
 | 1396 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1397 | void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) { | 
| Nicolas Geoffray | d6138ef | 2015-02-18 14:48:53 +0000 | [diff] [blame] | 1398 |   if (constant->IsIntConstant()) { | 
 | 1399 |     __ Mov(Register(destination), constant->AsIntConstant()->GetValue()); | 
 | 1400 |   } else if (constant->IsLongConstant()) { | 
 | 1401 |     __ Mov(Register(destination), constant->AsLongConstant()->GetValue()); | 
 | 1402 |   } else if (constant->IsNullConstant()) { | 
 | 1403 |     __ Mov(Register(destination), 0); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1404 |   } else if (constant->IsFloatConstant()) { | 
 | 1405 |     __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue()); | 
 | 1406 |   } else { | 
 | 1407 |     DCHECK(constant->IsDoubleConstant()); | 
 | 1408 |     __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue()); | 
 | 1409 |   } | 
 | 1410 | } | 
 | 1411 |  | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1412 |  | 
 | 1413 | static bool CoherentConstantAndType(Location constant, Primitive::Type type) { | 
 | 1414 |   DCHECK(constant.IsConstant()); | 
 | 1415 |   HConstant* cst = constant.GetConstant(); | 
 | 1416 |   return (cst->IsIntConstant() && type == Primitive::kPrimInt) || | 
| Nicolas Geoffray | d6138ef | 2015-02-18 14:48:53 +0000 | [diff] [blame] | 1417 |          // Null is mapped to a core W register, which we associate with kPrimInt. | 
 | 1418 |          (cst->IsNullConstant() && type == Primitive::kPrimInt) || | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1419 |          (cst->IsLongConstant() && type == Primitive::kPrimLong) || | 
 | 1420 |          (cst->IsFloatConstant() && type == Primitive::kPrimFloat) || | 
 | 1421 |          (cst->IsDoubleConstant() && type == Primitive::kPrimDouble); | 
 | 1422 | } | 
 | 1423 |  | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1424 | void CodeGeneratorARM64::MoveLocation(Location destination, | 
 | 1425 |                                       Location source, | 
 | 1426 |                                       Primitive::Type dst_type) { | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1427 |   if (source.Equals(destination)) { | 
 | 1428 |     return; | 
 | 1429 |   } | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1430 |  | 
 | 1431 |   // A valid move can always be inferred from the destination and source | 
 | 1432 |   // locations. When moving from and to a register, the argument type can be | 
 | 1433 |   // used to generate 32bit instead of 64bit moves. In debug mode we also | 
 | 1434 |   // check the coherency of the locations and the type. | 
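 |  |   // For instance, a GPR-to-FPR move with an unspecified type is forced to a | 
 |  |   // 64bit Fmov, while a zero constant headed for a stack slot is stored | 
 |  |   // straight from wzr/xzr without acquiring a scratch register. | 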
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1435 |   bool unspecified_type = (dst_type == Primitive::kPrimVoid); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1436 |  | 
 | 1437 |   if (destination.IsRegister() || destination.IsFpuRegister()) { | 
 | 1438 |     if (unspecified_type) { | 
 | 1439 |       HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr; | 
 | 1440 |       if (source.IsStackSlot() || | 
| Nicolas Geoffray | d6138ef | 2015-02-18 14:48:53 +0000 | [diff] [blame] | 1441 |           (src_cst != nullptr && (src_cst->IsIntConstant() | 
 | 1442 |                                   || src_cst->IsFloatConstant() | 
 | 1443 |                                   || src_cst->IsNullConstant()))) { | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1444 |         // For stack slots and 32bit constants, a 32bit type is appropriate. | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1445 |         dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat; | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1446 |       } else { | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1447 |         // If the source is a double stack slot or a 64bit constant, a 64bit | 
 | 1448 |         // type is appropriate. Else the source is a register, and since the | 
 | 1449 |         // type has not been specified, we choose a 64bit type to force a 64bit | 
 | 1450 |         // move. | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1451 |         dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble; | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1452 |       } | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1453 |     } | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1454 |     DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) || | 
 | 1455 |            (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type))); | 
 | 1456 |     CPURegister dst = CPURegisterFrom(destination, dst_type); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1457 |     if (source.IsStackSlot() || source.IsDoubleStackSlot()) { | 
 | 1458 |       DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot()); | 
 | 1459 |       __ Ldr(dst, StackOperandFrom(source)); | 
 | 1460 |     } else if (source.IsConstant()) { | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1461 |       DCHECK(CoherentConstantAndType(source, dst_type)); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1462 |       MoveConstant(dst, source.GetConstant()); | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1463 |     } else if (source.IsRegister()) { | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1464 |       if (destination.IsRegister()) { | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1465 |         __ Mov(Register(dst), RegisterFrom(source, dst_type)); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1466 |       } else { | 
| Zheng Xu | ad4450e | 2015-04-17 18:48:56 +0800 | [diff] [blame] | 1467 |         DCHECK(destination.IsFpuRegister()); | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1468 |         Primitive::Type source_type = Primitive::Is64BitType(dst_type) | 
 | 1469 |             ? Primitive::kPrimLong | 
 | 1470 |             : Primitive::kPrimInt; | 
 | 1471 |         __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type)); | 
 | 1472 |       } | 
 | 1473 |     } else { | 
 | 1474 |       DCHECK(source.IsFpuRegister()); | 
 | 1475 |       if (destination.IsRegister()) { | 
 | 1476 |         Primitive::Type source_type = Primitive::Is64BitType(dst_type) | 
 | 1477 |             ? Primitive::kPrimDouble | 
 | 1478 |             : Primitive::kPrimFloat; | 
 | 1479 |         __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type)); | 
 | 1480 |       } else { | 
 | 1481 |         DCHECK(destination.IsFpuRegister()); | 
 | 1482 |         __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type)); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1483 |       } | 
 | 1484 |     } | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1485 |   } else {  // The destination is not a register. It must be a stack slot. | 
 | 1486 |     DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot()); | 
 | 1487 |     if (source.IsRegister() || source.IsFpuRegister()) { | 
 | 1488 |       if (unspecified_type) { | 
 | 1489 |         if (source.IsRegister()) { | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1490 |           dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong; | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1491 |         } else { | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1492 |           dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble; | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1493 |         } | 
 | 1494 |       } | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1495 |       DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) && | 
 | 1496 |              (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type))); | 
 | 1497 |       __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination)); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1498 |     } else if (source.IsConstant()) { | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 1499 |       DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type)) | 
 | 1500 |           << source << " " << dst_type; | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1501 |       UseScratchRegisterScope temps(GetVIXLAssembler()); | 
 | 1502 |       HConstant* src_cst = source.GetConstant(); | 
 | 1503 |       CPURegister temp; | 
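 |  |       // Zero constants are stored straight from the zero register (xzr/wzr); other constants are | 
 |  |       // first materialized in a scratch register of the matching size and type. | 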
| Alexandre Rames | b2b753c | 2016-08-02 13:45:28 +0100 | [diff] [blame] | 1504 |       if (src_cst->IsZeroBitPattern()) { | 
 | 1505 |         temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant()) ? xzr : wzr; | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1506 |       } else { | 
| Alexandre Rames | b2b753c | 2016-08-02 13:45:28 +0100 | [diff] [blame] | 1507 |         if (src_cst->IsIntConstant()) { | 
 | 1508 |           temp = temps.AcquireW(); | 
 | 1509 |         } else if (src_cst->IsLongConstant()) { | 
 | 1510 |           temp = temps.AcquireX(); | 
 | 1511 |         } else if (src_cst->IsFloatConstant()) { | 
 | 1512 |           temp = temps.AcquireS(); | 
 | 1513 |         } else { | 
 | 1514 |           DCHECK(src_cst->IsDoubleConstant()); | 
 | 1515 |           temp = temps.AcquireD(); | 
 | 1516 |         } | 
 | 1517 |         MoveConstant(temp, src_cst); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1518 |       } | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1519 |       __ Str(temp, StackOperandFrom(destination)); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1520 |     } else { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1521 |       DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot()); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1522 |       DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot()); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1523 |       UseScratchRegisterScope temps(GetVIXLAssembler()); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1524 |       // There is generally less register pressure on FP registers, so use an FP scratch register for the copy. | 
 | 1525 |       FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS(); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1526 |       __ Ldr(temp, StackOperandFrom(source)); | 
 | 1527 |       __ Str(temp, StackOperandFrom(destination)); | 
 | 1528 |     } | 
 | 1529 |   } | 
 | 1530 | } | 
 | 1531 |  | 
 | 1532 | void CodeGeneratorARM64::Load(Primitive::Type type, | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1533 |                               CPURegister dst, | 
 | 1534 |                               const MemOperand& src) { | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1535 |   switch (type) { | 
 | 1536 |     case Primitive::kPrimBoolean: | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1537 |       __ Ldrb(Register(dst), src); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1538 |       break; | 
 | 1539 |     case Primitive::kPrimByte: | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1540 |       __ Ldrsb(Register(dst), src); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1541 |       break; | 
 | 1542 |     case Primitive::kPrimShort: | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1543 |       __ Ldrsh(Register(dst), src); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1544 |       break; | 
 | 1545 |     case Primitive::kPrimChar: | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1546 |       __ Ldrh(Register(dst), src); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1547 |       break; | 
 | 1548 |     case Primitive::kPrimInt: | 
 | 1549 |     case Primitive::kPrimNot: | 
 | 1550 |     case Primitive::kPrimLong: | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1551 |     case Primitive::kPrimFloat: | 
 | 1552 |     case Primitive::kPrimDouble: | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1553 |       DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type)); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1554 |       __ Ldr(dst, src); | 
 | 1555 |       break; | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1556 |     case Primitive::kPrimVoid: | 
 | 1557 |       LOG(FATAL) << "Unreachable type " << type; | 
 | 1558 |   } | 
 | 1559 | } | 
 | 1560 |  | 
| Calin Juravle | 77520bc | 2015-01-12 18:45:46 +0000 | [diff] [blame] | 1561 | void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction, | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1562 |                                      CPURegister dst, | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1563 |                                      const MemOperand& src, | 
 | 1564 |                                      bool needs_null_check) { | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 1565 |   MacroAssembler* masm = GetVIXLAssembler(); | 
 | 1566 |   BlockPoolsScope block_pools(masm); | 
 | 1567 |   UseScratchRegisterScope temps(masm); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1568 |   Register temp_base = temps.AcquireX(); | 
| Calin Juravle | 77520bc | 2015-01-12 18:45:46 +0000 | [diff] [blame] | 1569 |   Primitive::Type type = instruction->GetType(); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1570 |  | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1571 |   DCHECK(!src.IsPreIndex()); | 
 | 1572 |   DCHECK(!src.IsPostIndex()); | 
 | 1573 |  | 
 | 1574 |   // TODO(vixl): Let the MacroAssembler handle MemOperand. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1575 |   __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src)); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1576 |   MemOperand base = MemOperand(temp_base); | 
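 |  |   // Acquiring loads (Ldar and friends) only support a plain [base] addressing mode, which is why | 
 |  |   // the full address was materialized into temp_base above. | 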
 | 1577 |   switch (type) { | 
 | 1578 |     case Primitive::kPrimBoolean: | 
 | 1579 |       __ Ldarb(Register(dst), base); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1580 |       if (needs_null_check) { | 
 | 1581 |         MaybeRecordImplicitNullCheck(instruction); | 
 | 1582 |       } | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1583 |       break; | 
 | 1584 |     case Primitive::kPrimByte: | 
 | 1585 |       __ Ldarb(Register(dst), base); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1586 |       if (needs_null_check) { | 
 | 1587 |         MaybeRecordImplicitNullCheck(instruction); | 
 | 1588 |       } | 
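 |  |       // Ldarb zero-extends the loaded byte, so sign-extend it explicitly with Sbfx. | 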
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1589 |       __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte); | 
 | 1590 |       break; | 
 | 1591 |     case Primitive::kPrimChar: | 
 | 1592 |       __ Ldarh(Register(dst), base); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1593 |       if (needs_null_check) { | 
 | 1594 |         MaybeRecordImplicitNullCheck(instruction); | 
 | 1595 |       } | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1596 |       break; | 
 | 1597 |     case Primitive::kPrimShort: | 
 | 1598 |       __ Ldarh(Register(dst), base); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1599 |       if (needs_null_check) { | 
 | 1600 |         MaybeRecordImplicitNullCheck(instruction); | 
 | 1601 |       } | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1602 |       __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte); | 
 | 1603 |       break; | 
 | 1604 |     case Primitive::kPrimInt: | 
 | 1605 |     case Primitive::kPrimNot: | 
 | 1606 |     case Primitive::kPrimLong: | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1607 |       DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type)); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1608 |       __ Ldar(Register(dst), base); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1609 |       if (needs_null_check) { | 
 | 1610 |         MaybeRecordImplicitNullCheck(instruction); | 
 | 1611 |       } | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1612 |       break; | 
 | 1613 |     case Primitive::kPrimFloat: | 
 | 1614 |     case Primitive::kPrimDouble: { | 
 | 1615 |       DCHECK(dst.IsFPRegister()); | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1616 |       DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type)); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1617 |  | 
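 |  |       // There is no load-acquire form for FP registers: load into a core register, then move the | 
 |  |       // bits across with Fmov. | 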
 | 1618 |       Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW(); | 
 | 1619 |       __ Ldar(temp, base); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1620 |       if (needs_null_check) { | 
 | 1621 |         MaybeRecordImplicitNullCheck(instruction); | 
 | 1622 |       } | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1623 |       __ Fmov(FPRegister(dst), temp); | 
 | 1624 |       break; | 
 | 1625 |     } | 
 | 1626 |     case Primitive::kPrimVoid: | 
 | 1627 |       LOG(FATAL) << "Unreachable type " << type; | 
 | 1628 |   } | 
 | 1629 | } | 
 | 1630 |  | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1631 | void CodeGeneratorARM64::Store(Primitive::Type type, | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1632 |                                CPURegister src, | 
 | 1633 |                                const MemOperand& dst) { | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1634 |   switch (type) { | 
 | 1635 |     case Primitive::kPrimBoolean: | 
 | 1636 |     case Primitive::kPrimByte: | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1637 |       __ Strb(Register(src), dst); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1638 |       break; | 
 | 1639 |     case Primitive::kPrimChar: | 
 | 1640 |     case Primitive::kPrimShort: | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1641 |       __ Strh(Register(src), dst); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1642 |       break; | 
 | 1643 |     case Primitive::kPrimInt: | 
 | 1644 |     case Primitive::kPrimNot: | 
 | 1645 |     case Primitive::kPrimLong: | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1646 |     case Primitive::kPrimFloat: | 
 | 1647 |     case Primitive::kPrimDouble: | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1648 |       DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type)); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1649 |       __ Str(src, dst); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1650 |       break; | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 1651 |     case Primitive::kPrimVoid: | 
 | 1652 |       LOG(FATAL) << "Unreachable type " << type; | 
 | 1653 |   } | 
 | 1654 | } | 
 | 1655 |  | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1656 | void CodeGeneratorARM64::StoreRelease(Primitive::Type type, | 
 | 1657 |                                       CPURegister src, | 
 | 1658 |                                       const MemOperand& dst) { | 
 | 1659 |   UseScratchRegisterScope temps(GetVIXLAssembler()); | 
 | 1660 |   Register temp_base = temps.AcquireX(); | 
 | 1661 |  | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1662 |   DCHECK(!dst.IsPreIndex()); | 
 | 1663 |   DCHECK(!dst.IsPostIndex()); | 
 | 1664 |  | 
 | 1665 |   // TODO(vixl): Let the MacroAssembler handle this. | 
| Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 1666 |   Operand op = OperandFromMemOperand(dst); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1667 |   __ Add(temp_base, dst.GetBaseRegister(), op); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1668 |   MemOperand base = MemOperand(temp_base); | 
 | 1669 |   switch (type) { | 
 | 1670 |     case Primitive::kPrimBoolean: | 
 | 1671 |     case Primitive::kPrimByte: | 
 | 1672 |       __ Stlrb(Register(src), base); | 
 | 1673 |       break; | 
 | 1674 |     case Primitive::kPrimChar: | 
 | 1675 |     case Primitive::kPrimShort: | 
 | 1676 |       __ Stlrh(Register(src), base); | 
 | 1677 |       break; | 
 | 1678 |     case Primitive::kPrimInt: | 
 | 1679 |     case Primitive::kPrimNot: | 
 | 1680 |     case Primitive::kPrimLong: | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1681 |       DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type)); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1682 |       __ Stlr(Register(src), base); | 
 | 1683 |       break; | 
 | 1684 |     case Primitive::kPrimFloat: | 
 | 1685 |     case Primitive::kPrimDouble: { | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 1686 |       DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type)); | 
| Alexandre Rames | be919d9 | 2016-08-23 18:33:36 +0100 | [diff] [blame] | 1687 |       Register temp_src; | 
 | 1688 |       if (src.IsZero()) { | 
 | 1689 |         // The zero register is used to avoid synthesizing zero constants. | 
 | 1690 |         temp_src = Register(src); | 
 | 1691 |       } else { | 
 | 1692 |         DCHECK(src.IsFPRegister()); | 
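 |  |         // Stlr only accepts core registers, so move the FP value into a core scratch register first. | 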
 | 1693 |         temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW(); | 
 | 1694 |         __ Fmov(temp_src, FPRegister(src)); | 
 | 1695 |       } | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1696 |  | 
| Alexandre Rames | be919d9 | 2016-08-23 18:33:36 +0100 | [diff] [blame] | 1697 |       __ Stlr(temp_src, base); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1698 |       break; | 
 | 1699 |     } | 
 | 1700 |     case Primitive::kPrimVoid: | 
 | 1701 |       LOG(FATAL) << "Unreachable type " << type; | 
 | 1702 |   } | 
 | 1703 | } | 
 | 1704 |  | 
| Calin Juravle | 175dc73 | 2015-08-25 15:42:32 +0100 | [diff] [blame] | 1705 | void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint, | 
 | 1706 |                                        HInstruction* instruction, | 
 | 1707 |                                        uint32_t dex_pc, | 
 | 1708 |                                        SlowPathCode* slow_path) { | 
| Alexandre Rames | 91a6516 | 2016-09-19 13:54:30 +0100 | [diff] [blame] | 1709 |   ValidateInvokeRuntime(entrypoint, instruction, slow_path); | 
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 1710 |   GenerateInvokeRuntime(GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()); | 
| Serban Constantinescu | da8ffec | 2016-03-09 12:02:11 +0000 | [diff] [blame] | 1711 |   if (EntrypointRequiresStackMap(entrypoint)) { | 
 | 1712 |     RecordPcInfo(instruction, dex_pc, slow_path); | 
 | 1713 |   } | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1714 | } | 
 | 1715 |  | 
| Roland Levillain | dec8f63 | 2016-07-22 17:10:06 +0100 | [diff] [blame] | 1716 | void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset, | 
 | 1717 |                                                              HInstruction* instruction, | 
 | 1718 |                                                              SlowPathCode* slow_path) { | 
 | 1719 |   ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path); | 
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 1720 |   GenerateInvokeRuntime(entry_point_offset); | 
 | 1721 | } | 
 | 1722 |  | 
 | 1723 | void CodeGeneratorARM64::GenerateInvokeRuntime(int32_t entry_point_offset) { | 
| Roland Levillain | dec8f63 | 2016-07-22 17:10:06 +0100 | [diff] [blame] | 1724 |   BlockPoolsScope block_pools(GetVIXLAssembler()); | 
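 |  |   // Load the entrypoint from the Thread register (tr) at the given offset and call it via Blr. | 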
 | 1725 |   __ Ldr(lr, MemOperand(tr, entry_point_offset)); | 
 | 1726 |   __ Blr(lr); | 
 | 1727 | } | 
 | 1728 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1729 | void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path, | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 1730 |                                                                      Register class_reg) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1731 |   UseScratchRegisterScope temps(GetVIXLAssembler()); | 
 | 1732 |   Register temp = temps.AcquireW(); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1733 |   size_t status_offset = mirror::Class::StatusOffset().SizeValue(); | 
 | 1734 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 1735 |   // Even if the initialized flag is set, we need to ensure consistent memory ordering. | 
| Serban Constantinescu | 4a6a67c | 2016-01-27 09:19:56 +0000 | [diff] [blame] | 1736 |   // TODO(vixl): Let the MacroAssembler handle MemOperand. | 
 | 1737 |   __ Add(temp, class_reg, status_offset); | 
 | 1738 |   __ Ldar(temp, HeapOperand(temp)); | 
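 |  |   // Classes whose status is below kStatusInitialized still need initialization, so branch to the | 
 |  |   // slow path for them. | 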
 | 1739 |   __ Cmp(temp, mirror::Class::kStatusInitialized); | 
 | 1740 |   __ B(lt, slow_path->GetEntryLabel()); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1741 |   __ Bind(slow_path->GetExitLabel()); | 
 | 1742 | } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1743 |  | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1744 | void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) { | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 1745 |   BarrierType type = BarrierAll; | 
 | 1746 |  | 
 | 1747 |   switch (kind) { | 
 | 1748 |     case MemBarrierKind::kAnyAny: | 
 | 1749 |     case MemBarrierKind::kAnyStore: { | 
 | 1750 |       type = BarrierAll; | 
 | 1751 |       break; | 
 | 1752 |     } | 
 | 1753 |     case MemBarrierKind::kLoadAny: { | 
 | 1754 |       type = BarrierReads; | 
 | 1755 |       break; | 
 | 1756 |     } | 
 | 1757 |     case MemBarrierKind::kStoreStore: { | 
 | 1758 |       type = BarrierWrites; | 
 | 1759 |       break; | 
 | 1760 |     } | 
 | 1761 |     default: | 
 | 1762 |       LOG(FATAL) << "Unexpected memory barrier " << kind; | 
 | 1763 |   } | 
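 |  |   // All barriers are emitted as DMB on the inner-shareable domain; `type` restricts the barrier | 
 |  |   // to reads, writes, or both. | 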
 | 1764 |   __ Dmb(InnerShareable, type); | 
 | 1765 | } | 
 | 1766 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 1767 | void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction, | 
 | 1768 |                                                          HBasicBlock* successor) { | 
 | 1769 |   SuspendCheckSlowPathARM64* slow_path = | 
| Nicolas Geoffray | db216f4 | 2015-05-05 17:02:20 +0100 | [diff] [blame] | 1770 |       down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath()); | 
 | 1771 |   if (slow_path == nullptr) { | 
 | 1772 |     slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor); | 
 | 1773 |     instruction->SetSlowPath(slow_path); | 
 | 1774 |     codegen_->AddSlowPath(slow_path); | 
 | 1775 |     if (successor != nullptr) { | 
 | 1776 |       DCHECK(successor->IsLoopHeader()); | 
 | 1777 |       codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction); | 
 | 1778 |     } | 
 | 1779 |   } else { | 
 | 1780 |     DCHECK_EQ(slow_path->GetSuccessor(), successor); | 
 | 1781 |   } | 
 | 1782 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 1783 |   UseScratchRegisterScope temps(codegen_->GetVIXLAssembler()); | 
 | 1784 |   Register temp = temps.AcquireW(); | 
 | 1785 |  | 
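 |  |   // The thread's flags are non-zero when a suspend or checkpoint request is pending; in that case | 
 |  |   // take the slow path. | 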
| Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 1786 |   __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue())); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 1787 |   if (successor == nullptr) { | 
 | 1788 |     __ Cbnz(temp, slow_path->GetEntryLabel()); | 
 | 1789 |     __ Bind(slow_path->GetReturnLabel()); | 
 | 1790 |   } else { | 
 | 1791 |     __ Cbz(temp, codegen_->GetLabelOf(successor)); | 
 | 1792 |     __ B(slow_path->GetEntryLabel()); | 
 | 1793 |     // slow_path will return to GetLabelOf(successor). | 
 | 1794 |   } | 
 | 1795 | } | 
 | 1796 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1797 | InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph, | 
 | 1798 |                                                              CodeGeneratorARM64* codegen) | 
| Aart Bik | 42249c3 | 2016-01-07 15:33:50 -0800 | [diff] [blame] | 1799 |       : InstructionCodeGenerator(graph, codegen), | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1800 |         assembler_(codegen->GetAssembler()), | 
 | 1801 |         codegen_(codegen) {} | 
 | 1802 |  | 
 | 1803 | #define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M)              \ | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 1804 |   /* No unimplemented IR. */ | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1805 |  | 
 | 1806 | #define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode | 
 | 1807 |  | 
 | 1808 | enum UnimplementedInstructionBreakCode { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1809 |   // Using a base helps identify when we hit such breakpoints. | 
 | 1810 |   UnimplementedInstructionBreakCodeBaseCode = 0x900, | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1811 | #define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name), | 
 | 1812 |   FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION) | 
 | 1813 | #undef ENUM_UNIMPLEMENTED_INSTRUCTION | 
 | 1814 | }; | 
 | 1815 |  | 
 | 1816 | #define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \ | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 1817 |   void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) {  \ | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1818 |     __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \ | 
 | 1819 |   }                                                                                   \ | 
 | 1820 |   void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \ | 
 | 1821 |     LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \ | 
 | 1822 |     locations->SetOut(Location::Any());                                               \ | 
 | 1823 |   } | 
 | 1824 |   FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS) | 
 | 1825 | #undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS | 
 | 1826 |  | 
 | 1827 | #undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1828 | #undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1829 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1830 | void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1831 |   DCHECK_EQ(instr->InputCount(), 2U); | 
 | 1832 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); | 
 | 1833 |   Primitive::Type type = instr->GetResultType(); | 
 | 1834 |   switch (type) { | 
 | 1835 |     case Primitive::kPrimInt: | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1836 |     case Primitive::kPrimLong: | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1837 |       locations->SetInAt(0, Location::RequiresRegister()); | 
| Serban Constantinescu | 2d35d9d | 2015-02-22 22:08:01 +0000 | [diff] [blame] | 1838 |       locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr)); | 
| Alexandre Rames | fb4e5fa | 2014-11-06 12:41:16 +0000 | [diff] [blame] | 1839 |       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1840 |       break; | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1841 |  | 
 | 1842 |     case Primitive::kPrimFloat: | 
 | 1843 |     case Primitive::kPrimDouble: | 
 | 1844 |       locations->SetInAt(0, Location::RequiresFpuRegister()); | 
 | 1845 |       locations->SetInAt(1, Location::RequiresFpuRegister()); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1846 |       locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1847 |       break; | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1848 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1849 |     default: | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1850 |       LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type; | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1851 |   } | 
 | 1852 | } | 
 | 1853 |  | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1854 | void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1855 |   DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet()); | 
 | 1856 |  | 
 | 1857 |   bool object_field_get_with_read_barrier = | 
 | 1858 |       kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1859 |   LocationSummary* locations = | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1860 |       new (GetGraph()->GetArena()) LocationSummary(instruction, | 
 | 1861 |                                                    object_field_get_with_read_barrier ? | 
 | 1862 |                                                        LocationSummary::kCallOnSlowPath : | 
 | 1863 |                                                        LocationSummary::kNoCall); | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 1864 |   if (object_field_get_with_read_barrier && kUseBakerReadBarrier) { | 
| Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 1865 |     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers. | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 1866 |   } | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1867 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 1868 |   if (Primitive::IsFloatingPointType(instruction->GetType())) { | 
 | 1869 |     locations->SetOut(Location::RequiresFpuRegister()); | 
 | 1870 |   } else { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 1871 |     // The output overlaps for an object field get when read barriers | 
 | 1872 |     // are enabled: we do not want the load to overwrite the object's | 
 | 1873 |     // location, as we need it to emit the read barrier. | 
 | 1874 |     locations->SetOut( | 
 | 1875 |         Location::RequiresRegister(), | 
 | 1876 |         object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1877 |   } | 
 | 1878 | } | 
 | 1879 |  | 
 | 1880 | void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction, | 
 | 1881 |                                                    const FieldInfo& field_info) { | 
 | 1882 |   DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet()); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1883 |   LocationSummary* locations = instruction->GetLocations(); | 
 | 1884 |   Location base_loc = locations->InAt(0); | 
 | 1885 |   Location out = locations->Out(); | 
 | 1886 |   uint32_t offset = field_info.GetFieldOffset().Uint32Value(); | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1887 |   Primitive::Type field_type = field_info.GetFieldType(); | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 1888 |   BlockPoolsScope block_pools(GetVIXLAssembler()); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1889 |   MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset()); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1890 |  | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1891 |   if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) { | 
 | 1892 |     // Object FieldGet with Baker's read barrier case. | 
 | 1893 |     MacroAssembler* masm = GetVIXLAssembler(); | 
 | 1894 |     UseScratchRegisterScope temps(masm); | 
 | 1895 |     // /* HeapReference<Object> */ out = *(base + offset) | 
 | 1896 |     Register base = RegisterFrom(base_loc, Primitive::kPrimNot); | 
 | 1897 |     Register temp = temps.AcquireW(); | 
 | 1898 |     // Note that potential implicit null checks are handled in this | 
 | 1899 |     // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call. | 
 | 1900 |     codegen_->GenerateFieldLoadWithBakerReadBarrier( | 
 | 1901 |         instruction, | 
 | 1902 |         out, | 
 | 1903 |         base, | 
 | 1904 |         offset, | 
 | 1905 |         temp, | 
 | 1906 |         /* needs_null_check */ true, | 
| Serban Constantinescu | 4a6a67c | 2016-01-27 09:19:56 +0000 | [diff] [blame] | 1907 |         field_info.IsVolatile()); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1908 |   } else { | 
 | 1909 |     // General case. | 
 | 1910 |     if (field_info.IsVolatile()) { | 
| Serban Constantinescu | 4a6a67c | 2016-01-27 09:19:56 +0000 | [diff] [blame] | 1911 |       // Note that a potential implicit null check is handled in this | 
 | 1912 |       // CodeGeneratorARM64::LoadAcquire call. | 
 | 1913 |       // NB: LoadAcquire will record the pc info if needed. | 
 | 1914 |       codegen_->LoadAcquire( | 
 | 1915 |           instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1916 |     } else { | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1917 |       codegen_->Load(field_type, OutputCPURegister(instruction), field); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1918 |       codegen_->MaybeRecordImplicitNullCheck(instruction); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1919 |     } | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 1920 |     if (field_type == Primitive::kPrimNot) { | 
 | 1921 |       // If read barriers are enabled, emit read barriers other than | 
 | 1922 |       // Baker's using a slow path (and also unpoison the loaded | 
 | 1923 |       // reference, if heap poisoning is enabled). | 
 | 1924 |       codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset); | 
 | 1925 |     } | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1926 |   } | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1927 | } | 
 | 1928 |  | 
 | 1929 | void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) { | 
 | 1930 |   LocationSummary* locations = | 
 | 1931 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); | 
 | 1932 |   locations->SetInAt(0, Location::RequiresRegister()); | 
| Alexandre Rames | be919d9 | 2016-08-23 18:33:36 +0100 | [diff] [blame] | 1933 |   if (IsConstantZeroBitPattern(instruction->InputAt(1))) { | 
 | 1934 |     locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant())); | 
 | 1935 |   } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) { | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1936 |     locations->SetInAt(1, Location::RequiresFpuRegister()); | 
 | 1937 |   } else { | 
 | 1938 |     locations->SetInAt(1, Location::RequiresRegister()); | 
 | 1939 |   } | 
 | 1940 | } | 
 | 1941 |  | 
 | 1942 | void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction, | 
| Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 1943 |                                                    const FieldInfo& field_info, | 
 | 1944 |                                                    bool value_can_be_null) { | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1945 |   DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet()); | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 1946 |   BlockPoolsScope block_pools(GetVIXLAssembler()); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1947 |  | 
 | 1948 |   Register obj = InputRegisterAt(instruction, 0); | 
| Alexandre Rames | be919d9 | 2016-08-23 18:33:36 +0100 | [diff] [blame] | 1949 |   CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1); | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1950 |   CPURegister source = value; | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1951 |   Offset offset = field_info.GetFieldOffset(); | 
 | 1952 |   Primitive::Type field_type = field_info.GetFieldType(); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1953 |  | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1954 |   { | 
 | 1955 |     // We use a block to end the scratch scope before the write barrier, thus | 
 | 1956 |     // freeing the temporary registers so they can be used in `MarkGCCard`. | 
 | 1957 |     UseScratchRegisterScope temps(GetVIXLAssembler()); | 
 | 1958 |  | 
 | 1959 |     if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) { | 
 | 1960 |       DCHECK(value.IsW()); | 
 | 1961 |       Register temp = temps.AcquireW(); | 
 | 1962 |       __ Mov(temp, value.W()); | 
 | 1963 |       GetAssembler()->PoisonHeapReference(temp.W()); | 
 | 1964 |       source = temp; | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1965 |     } | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1966 |  | 
 | 1967 |     if (field_info.IsVolatile()) { | 
| Serban Constantinescu | 4a6a67c | 2016-01-27 09:19:56 +0000 | [diff] [blame] | 1968 |       codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset)); | 
 | 1969 |       codegen_->MaybeRecordImplicitNullCheck(instruction); | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1970 |     } else { | 
 | 1971 |       codegen_->Store(field_type, source, HeapOperand(obj, offset)); | 
 | 1972 |       codegen_->MaybeRecordImplicitNullCheck(instruction); | 
 | 1973 |     } | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1974 |   } | 
 | 1975 |  | 
 | 1976 |   if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) { | 
| Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 1977 |     codegen_->MarkGCCard(obj, Register(value), value_can_be_null); | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 1978 |   } | 
 | 1979 | } | 
 | 1980 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1981 | void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1982 |   Primitive::Type type = instr->GetType(); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1983 |  | 
 | 1984 |   switch (type) { | 
 | 1985 |     case Primitive::kPrimInt: | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 1986 |     case Primitive::kPrimLong: { | 
 | 1987 |       Register dst = OutputRegister(instr); | 
 | 1988 |       Register lhs = InputRegisterAt(instr, 0); | 
 | 1989 |       Operand rhs = InputOperandAt(instr, 1); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1990 |       if (instr->IsAdd()) { | 
 | 1991 |         __ Add(dst, lhs, rhs); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 1992 |       } else if (instr->IsAnd()) { | 
 | 1993 |         __ And(dst, lhs, rhs); | 
 | 1994 |       } else if (instr->IsOr()) { | 
 | 1995 |         __ Orr(dst, lhs, rhs); | 
 | 1996 |       } else if (instr->IsSub()) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 1997 |         __ Sub(dst, lhs, rhs); | 
| Scott Wakeling | 40a04bf | 2015-12-11 09:50:36 +0000 | [diff] [blame] | 1998 |       } else if (instr->IsRor()) { | 
 | 1999 |         if (rhs.IsImmediate()) { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 2000 |           uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1); | 
| Scott Wakeling | 40a04bf | 2015-12-11 09:50:36 +0000 | [diff] [blame] | 2001 |           __ Ror(dst, lhs, shift); | 
 | 2002 |         } else { | 
 | 2003 |           // Ensure the shift distance is in a register of the same size as the result. If | 
 | 2004 |           // we are rotating a long and the shift distance originally comes in a W register, | 
 | 2005 |           // we do not need to sign-extend it (sxtw) for use as an X register, since shift | 
 | 2006 |           // distances are always masked with (reg_bits - 1). | 
 | 2007 |           __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type)); | 
 | 2008 |         } | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2009 |       } else { | 
 | 2010 |         DCHECK(instr->IsXor()); | 
 | 2011 |         __ Eor(dst, lhs, rhs); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2012 |       } | 
 | 2013 |       break; | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 2014 |     } | 
 | 2015 |     case Primitive::kPrimFloat: | 
 | 2016 |     case Primitive::kPrimDouble: { | 
 | 2017 |       FPRegister dst = OutputFPRegister(instr); | 
 | 2018 |       FPRegister lhs = InputFPRegisterAt(instr, 0); | 
 | 2019 |       FPRegister rhs = InputFPRegisterAt(instr, 1); | 
 | 2020 |       if (instr->IsAdd()) { | 
 | 2021 |         __ Fadd(dst, lhs, rhs); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2022 |       } else if (instr->IsSub()) { | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 2023 |         __ Fsub(dst, lhs, rhs); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2024 |       } else { | 
 | 2025 |         LOG(FATAL) << "Unexpected floating-point binary operation"; | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 2026 |       } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2027 |       break; | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 2028 |     } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2029 |     default: | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2030 |       LOG(FATAL) << "Unexpected binary operation type " << type; | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2031 |   } | 
 | 2032 | } | 
 | 2033 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2034 | void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) { | 
 | 2035 |   DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr()); | 
 | 2036 |  | 
 | 2037 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); | 
 | 2038 |   Primitive::Type type = instr->GetResultType(); | 
 | 2039 |   switch (type) { | 
 | 2040 |     case Primitive::kPrimInt: | 
 | 2041 |     case Primitive::kPrimLong: { | 
 | 2042 |       locations->SetInAt(0, Location::RequiresRegister()); | 
 | 2043 |       locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1))); | 
 | 2044 |       locations->SetOut(Location::RequiresRegister()); | 
 | 2045 |       break; | 
 | 2046 |     } | 
 | 2047 |     default: | 
 | 2048 |       LOG(FATAL) << "Unexpected shift type " << type; | 
 | 2049 |   } | 
 | 2050 | } | 
 | 2051 |  | 
 | 2052 | void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) { | 
 | 2053 |   DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr()); | 
 | 2054 |  | 
 | 2055 |   Primitive::Type type = instr->GetType(); | 
 | 2056 |   switch (type) { | 
 | 2057 |     case Primitive::kPrimInt: | 
 | 2058 |     case Primitive::kPrimLong: { | 
 | 2059 |       Register dst = OutputRegister(instr); | 
 | 2060 |       Register lhs = InputRegisterAt(instr, 0); | 
 | 2061 |       Operand rhs = InputOperandAt(instr, 1); | 
 | 2062 |       if (rhs.IsImmediate()) { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 2063 |         uint32_t shift_value = rhs.GetImmediate() & | 
| Roland Levillain | 5b5b931 | 2016-03-22 14:57:31 +0000 | [diff] [blame] | 2064 |             (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2065 |         if (instr->IsShl()) { | 
 | 2066 |           __ Lsl(dst, lhs, shift_value); | 
 | 2067 |         } else if (instr->IsShr()) { | 
 | 2068 |           __ Asr(dst, lhs, shift_value); | 
 | 2069 |         } else { | 
 | 2070 |           __ Lsr(dst, lhs, shift_value); | 
 | 2071 |         } | 
 | 2072 |       } else { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 2073 |         Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W(); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2074 |  | 
 | 2075 |         if (instr->IsShl()) { | 
 | 2076 |           __ Lsl(dst, lhs, rhs_reg); | 
 | 2077 |         } else if (instr->IsShr()) { | 
 | 2078 |           __ Asr(dst, lhs, rhs_reg); | 
 | 2079 |         } else { | 
 | 2080 |           __ Lsr(dst, lhs, rhs_reg); | 
 | 2081 |         } | 
 | 2082 |       } | 
 | 2083 |       break; | 
 | 2084 |     } | 
 | 2085 |     default: | 
 | 2086 |       LOG(FATAL) << "Unexpected shift operation type " << type; | 
 | 2087 |   } | 
 | 2088 | } | 
 | 2089 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2090 | void LocationsBuilderARM64::VisitAdd(HAdd* instruction) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2091 |   HandleBinaryOp(instruction); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2092 | } | 
 | 2093 |  | 
 | 2094 | void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2095 |   HandleBinaryOp(instruction); | 
 | 2096 | } | 
 | 2097 |  | 
 | 2098 | void LocationsBuilderARM64::VisitAnd(HAnd* instruction) { | 
 | 2099 |   HandleBinaryOp(instruction); | 
 | 2100 | } | 
 | 2101 |  | 
 | 2102 | void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) { | 
 | 2103 |   HandleBinaryOp(instruction); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2104 | } | 
 | 2105 |  | 
| Artem Serov | 7fc6350 | 2016-02-09 17:15:29 +0000 | [diff] [blame] | 2106 | void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) { | 
| Kevin Brodsky | 9ff0d20 | 2016-01-11 13:43:31 +0000 | [diff] [blame] | 2107 |   DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType(); | 
 | 2108 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); | 
 | 2109 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 2110 |   // There is no immediate variant of negated bitwise instructions in AArch64. | 
 | 2111 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 2112 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 2113 | } | 
 | 2114 |  | 
| Artem Serov | 7fc6350 | 2016-02-09 17:15:29 +0000 | [diff] [blame] | 2115 | void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) { | 
| Kevin Brodsky | 9ff0d20 | 2016-01-11 13:43:31 +0000 | [diff] [blame] | 2116 |   Register dst = OutputRegister(instr); | 
 | 2117 |   Register lhs = InputRegisterAt(instr, 0); | 
 | 2118 |   Register rhs = InputRegisterAt(instr, 1); | 
 | 2119 |  | 
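 |  |   // Bic, Orn and Eon compute `lhs <op> ~rhs` for AND, ORR and EOR respectively. | 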
 | 2120 |   switch (instr->GetOpKind()) { | 
 | 2121 |     case HInstruction::kAnd: | 
 | 2122 |       __ Bic(dst, lhs, rhs); | 
 | 2123 |       break; | 
 | 2124 |     case HInstruction::kOr: | 
 | 2125 |       __ Orn(dst, lhs, rhs); | 
 | 2126 |       break; | 
 | 2127 |     case HInstruction::kXor: | 
 | 2128 |       __ Eon(dst, lhs, rhs); | 
 | 2129 |       break; | 
 | 2130 |     default: | 
 | 2131 |       LOG(FATAL) << "Unreachable"; | 
 | 2132 |   } | 
 | 2133 | } | 
 | 2134 |  | 
| Alexandre Rames | 8626b74 | 2015-11-25 16:28:08 +0000 | [diff] [blame] | 2135 | void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp( | 
 | 2136 |     HArm64DataProcWithShifterOp* instruction) { | 
 | 2137 |   DCHECK(instruction->GetType() == Primitive::kPrimInt || | 
 | 2138 |          instruction->GetType() == Primitive::kPrimLong); | 
 | 2139 |   LocationSummary* locations = | 
 | 2140 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); | 
 | 2141 |   if (instruction->GetInstrKind() == HInstruction::kNeg) { | 
 | 2142 |     locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant())); | 
 | 2143 |   } else { | 
 | 2144 |     locations->SetInAt(0, Location::RequiresRegister()); | 
 | 2145 |   } | 
 | 2146 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 2147 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 2148 | } | 
 | 2149 |  | 
 | 2150 | void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp( | 
 | 2151 |     HArm64DataProcWithShifterOp* instruction) { | 
 | 2152 |   Primitive::Type type = instruction->GetType(); | 
 | 2153 |   HInstruction::InstructionKind kind = instruction->GetInstrKind(); | 
 | 2154 |   DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong); | 
 | 2155 |   Register out = OutputRegister(instruction); | 
 | 2156 |   Register left; | 
 | 2157 |   if (kind != HInstruction::kNeg) { | 
 | 2158 |     left = InputRegisterAt(instruction, 0); | 
 | 2159 |   } | 
 | 2160 |   // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion into the | 
 | 2161 |   // shifter operand operation, the IR producing `right_reg` (the input to the type | 
 | 2162 |   // conversion) can have a different type from this instruction's type, | 
 | 2163 |   // so we specify the type explicitly. | 
 | 2164 |   Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type); | 
| Roland Levillain | 5b5b931 | 2016-03-22 14:57:31 +0000 | [diff] [blame] | 2165 |   int64_t shift_amount = instruction->GetShiftAmount() & | 
 | 2166 |       (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance); | 
| Alexandre Rames | 8626b74 | 2015-11-25 16:28:08 +0000 | [diff] [blame] | 2167 |  | 
 | 2168 |   Operand right_operand(0); | 
 | 2169 |  | 
 | 2170 |   HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind(); | 
 | 2171 |   if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) { | 
 | 2172 |     right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind)); | 
 | 2173 |   } else { | 
 | 2174 |     right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount); | 
 | 2175 |   } | 
 | 2176 |  | 
 | 2177 |   // Logical binary operations do not support extension operations in the | 
 | 2178 |   // operand. Note that VIXL would still cope if one were passed, by generating | 
 | 2179 |   // the extension as a separate instruction. | 
 | 2180 |   // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`. | 
 | 2181 |   DCHECK(!right_operand.IsExtendedRegister() || | 
 | 2182 |          (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor && | 
 | 2183 |           kind != HInstruction::kNeg)); | 
 | 2184 |   switch (kind) { | 
 | 2185 |     case HInstruction::kAdd: | 
 | 2186 |       __ Add(out, left, right_operand); | 
 | 2187 |       break; | 
 | 2188 |     case HInstruction::kAnd: | 
 | 2189 |       __ And(out, left, right_operand); | 
 | 2190 |       break; | 
 | 2191 |     case HInstruction::kNeg: | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 2192 |       DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero()); | 
| Alexandre Rames | 8626b74 | 2015-11-25 16:28:08 +0000 | [diff] [blame] | 2193 |       __ Neg(out, right_operand); | 
 | 2194 |       break; | 
 | 2195 |     case HInstruction::kOr: | 
 | 2196 |       __ Orr(out, left, right_operand); | 
 | 2197 |       break; | 
 | 2198 |     case HInstruction::kSub: | 
 | 2199 |       __ Sub(out, left, right_operand); | 
 | 2200 |       break; | 
 | 2201 |     case HInstruction::kXor: | 
 | 2202 |       __ Eor(out, left, right_operand); | 
 | 2203 |       break; | 
 | 2204 |     default: | 
 | 2205 |       LOG(FATAL) << "Unexpected operation kind: " << kind; | 
 | 2206 |       UNREACHABLE(); | 
 | 2207 |   } | 
 | 2208 | } | 
 | 2209 |  | 
| Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 2210 | void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) { | 
| Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 2211 |   LocationSummary* locations = | 
 | 2212 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); | 
 | 2213 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 2214 |   locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction)); | 
 | 2215 |   locations->SetOut(Location::RequiresRegister()); | 
 | 2216 | } | 
 | 2217 |  | 
| Roland Levillain | 19c5419 | 2016-11-04 13:44:09 +0000 | [diff] [blame] | 2218 | void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) { | 
| Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 2219 |   __ Add(OutputRegister(instruction), | 
 | 2220 |          InputRegisterAt(instruction, 0), | 
 | 2221 |          Operand(InputOperandAt(instruction, 1))); | 
 | 2222 | } | 
 | 2223 |  | 
| Artem Udovichenko | 4a0dad6 | 2016-01-26 12:28:31 +0300 | [diff] [blame] | 2224 | void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) { | 
| Alexandre Rames | 418318f | 2015-11-20 15:55:47 +0000 | [diff] [blame] | 2225 |   LocationSummary* locations = | 
 | 2226 |       new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall); | 
| Artem Udovichenko | 4a0dad6 | 2016-01-26 12:28:31 +0300 | [diff] [blame] | 2227 |   HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex); | 
 | 2228 |   if (instr->GetOpKind() == HInstruction::kSub && | 
 | 2229 |       accumulator->IsConstant() && | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 2230 |       accumulator->AsConstant()->IsArithmeticZero()) { | 
| Artem Udovichenko | 4a0dad6 | 2016-01-26 12:28:31 +0300 | [diff] [blame] | 2231 |     // Don't allocate a register for the accumulator: an Mneg instruction will be used instead. | 
 | 2232 |   } else { | 
 | 2233 |     locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex, | 
 | 2234 |                        Location::RequiresRegister()); | 
 | 2235 |   } | 
 | 2236 |   locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister()); | 
 | 2237 |   locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister()); | 
| Alexandre Rames | 418318f | 2015-11-20 15:55:47 +0000 | [diff] [blame] | 2238 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 2239 | } | 
 | 2240 |  | 
| Artem Udovichenko | 4a0dad6 | 2016-01-26 12:28:31 +0300 | [diff] [blame] | 2241 | void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) { | 
| Alexandre Rames | 418318f | 2015-11-20 15:55:47 +0000 | [diff] [blame] | 2242 |   Register res = OutputRegister(instr); | 
| Artem Udovichenko | 4a0dad6 | 2016-01-26 12:28:31 +0300 | [diff] [blame] | 2243 |   Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex); | 
 | 2244 |   Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex); | 
| Alexandre Rames | 418318f | 2015-11-20 15:55:47 +0000 | [diff] [blame] | 2245 |  | 
 | 2246 |   // Avoid emitting code that could trigger Cortex A53's erratum 835769. | 
 | 2247 |   // This fixup should be carried out for all multiply-accumulate instructions: | 
 | 2248 |   // madd, msub, smaddl, smsubl, umaddl and umsubl. | 
 | 2249 |   if (instr->GetType() == Primitive::kPrimLong && | 
 | 2250 |       codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) { | 
 | 2251 |     MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler(); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 2252 |     vixl::aarch64::Instruction* prev = | 
 | 2253 |         masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize; | 
| Alexandre Rames | 418318f | 2015-11-20 15:55:47 +0000 | [diff] [blame] | 2254 |     if (prev->IsLoadOrStore()) { | 
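 |  |       // The erratum can be triggered by a 64-bit multiply-accumulate that directly follows a load | 
 |  |       // or store; a single intervening nop is enough to break the sequence. | 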
 | 2255 |       // Make sure we emit exactly one nop. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 2256 |       vixl::aarch64::CodeBufferCheckScope scope(masm, | 
 | 2257 |                                                 kInstructionSize, | 
 | 2258 |                                                 vixl::aarch64::CodeBufferCheckScope::kCheck, | 
 | 2259 |                                                 vixl::aarch64::CodeBufferCheckScope::kExactSize); | 
| Alexandre Rames | 418318f | 2015-11-20 15:55:47 +0000 | [diff] [blame] | 2260 |       __ nop(); | 
 | 2261 |     } | 
 | 2262 |   } | 
 | 2263 |  | 
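 |  |   // Madd computes `accumulator + left * right`, Msub computes `accumulator - left * right`, and | 
 |  |   // Mneg computes `-(left * right)`. | 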
 | 2264 |   if (instr->GetOpKind() == HInstruction::kAdd) { | 
| Artem Udovichenko | 4a0dad6 | 2016-01-26 12:28:31 +0300 | [diff] [blame] | 2265 |     Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex); | 
| Alexandre Rames | 418318f | 2015-11-20 15:55:47 +0000 | [diff] [blame] | 2266 |     __ Madd(res, mul_left, mul_right, accumulator); | 
 | 2267 |   } else { | 
 | 2268 |     DCHECK(instr->GetOpKind() == HInstruction::kSub); | 
| Artem Udovichenko | 4a0dad6 | 2016-01-26 12:28:31 +0300 | [diff] [blame] | 2269 |     HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex); | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 2270 |     if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) { | 
| Artem Udovichenko | 4a0dad6 | 2016-01-26 12:28:31 +0300 | [diff] [blame] | 2271 |       __ Mneg(res, mul_left, mul_right); | 
 | 2272 |     } else { | 
 | 2273 |       Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex); | 
 | 2274 |       __ Msub(res, mul_left, mul_right, accumulator); | 
 | 2275 |     } | 
| Alexandre Rames | 418318f | 2015-11-20 15:55:47 +0000 | [diff] [blame] | 2276 |   } | 
 | 2277 | } | 
 | 2278 |  | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2279 | void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2280 |   bool object_array_get_with_read_barrier = | 
 | 2281 |       kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2282 |   LocationSummary* locations = | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2283 |       new (GetGraph()->GetArena()) LocationSummary(instruction, | 
 | 2284 |                                                    object_array_get_with_read_barrier ? | 
 | 2285 |                                                        LocationSummary::kCallOnSlowPath : | 
 | 2286 |                                                        LocationSummary::kNoCall); | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 2287 |   if (object_array_get_with_read_barrier && kUseBakerReadBarrier) { | 
| Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 2288 |     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers. | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 2289 |   } | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2290 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 2291 |   locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); | 
| Alexandre Rames | 88c13cd | 2015-04-14 17:35:39 +0100 | [diff] [blame] | 2292 |   if (Primitive::IsFloatingPointType(instruction->GetType())) { | 
 | 2293 |     locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); | 
 | 2294 |   } else { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2295 |     // The output overlaps in the case of an object array get with | 
 | 2296 |     // read barriers enabled: we do not want the move to overwrite the | 
 | 2297 |     // array's location, as we need it to emit the read barrier. | 
 | 2298 |     locations->SetOut( | 
 | 2299 |         Location::RequiresRegister(), | 
 | 2300 |         object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap); | 
| Alexandre Rames | 88c13cd | 2015-04-14 17:35:39 +0100 | [diff] [blame] | 2301 |   } | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2302 | } | 
 | 2303 |  | 
 | 2304 | void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) { | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2305 |   Primitive::Type type = instruction->GetType(); | 
 | 2306 |   Register obj = InputRegisterAt(instruction, 0); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2307 |   LocationSummary* locations = instruction->GetLocations(); | 
 | 2308 |   Location index = locations->InAt(1); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2309 |   Location out = locations->Out(); | 
| Vladimir Marko | 87f3fcb | 2016-04-28 15:52:11 +0100 | [diff] [blame] | 2310 |   uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction); | 
| jessicahandojo | 0576575 | 2016-09-09 19:01:32 -0700 | [diff] [blame] | 2311 |   const bool maybe_compressed_char_at = mirror::kUseStringCompression && | 
 | 2312 |                                         instruction->IsStringCharAt(); | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 2313 |   MacroAssembler* masm = GetVIXLAssembler(); | 
 | 2314 |   UseScratchRegisterScope temps(masm); | 
| Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 2315 |   // Block pools between `Load` and `MaybeRecordImplicitNullCheck`. | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 2316 |   BlockPoolsScope block_pools(masm); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2317 |  | 
| Roland Levillain | 19c5419 | 2016-11-04 13:44:09 +0000 | [diff] [blame] | 2318 |   // The read barrier instrumentation of object ArrayGet instructions | 
 | 2319 |   // does not support the HIntermediateAddress instruction. | 
 | 2320 |   DCHECK(!((type == Primitive::kPrimNot) && | 
 | 2321 |            instruction->GetArray()->IsIntermediateAddress() && | 
 | 2322 |            kEmitCompilerReadBarrier)); | 
 | 2323 |  | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2324 |   if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) { | 
 | 2325 |     // Object ArrayGet with Baker's read barrier case. | 
 | 2326 |     Register temp = temps.AcquireW(); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2327 |     // Note that a potential implicit null check is handled in the | 
 | 2328 |     // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call. | 
 | 2329 |     codegen_->GenerateArrayLoadWithBakerReadBarrier( | 
 | 2330 |         instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2331 |   } else { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2332 |     // General case. | 
 | 2333 |     MemOperand source = HeapOperand(obj); | 
| jessicahandojo | 0576575 | 2016-09-09 19:01:32 -0700 | [diff] [blame] | 2334 |     Register length; | 
 | 2335 |     if (maybe_compressed_char_at) { | 
 | 2336 |       uint32_t count_offset = mirror::String::CountOffset().Uint32Value(); | 
 | 2337 |       length = temps.AcquireW(); | 
 | 2338 |       __ Ldr(length, HeapOperand(obj, count_offset)); | 
 | 2339 |       codegen_->MaybeRecordImplicitNullCheck(instruction); | 
 | 2340 |     } | 
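    // With string compression, the `count` field loaded above holds both the
    // length and the compression state: as tested by the Tbz on bit 31 below,
    // a set most-significant bit means a compressed (8-bit chars) string and a
    // clear bit an uncompressed (16-bit chars) one, so char-at loads either a
    // byte at `offset + index` or a half-word at `offset + (index << 1)`.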
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2341 |     if (index.IsConstant()) { | 
| jessicahandojo | 0576575 | 2016-09-09 19:01:32 -0700 | [diff] [blame] | 2342 |       if (maybe_compressed_char_at) { | 
 | 2343 |         vixl::aarch64::Label uncompressed_load, done; | 
 | 2344 |         __ Tbz(length.W(), kWRegSize - 1, &uncompressed_load); | 
 | 2345 |         __ Ldrb(Register(OutputCPURegister(instruction)), | 
 | 2346 |                 HeapOperand(obj, offset + Int64ConstantFrom(index))); | 
 | 2347 |         __ B(&done); | 
 | 2348 |         __ Bind(&uncompressed_load); | 
 | 2349 |         __ Ldrh(Register(OutputCPURegister(instruction)), | 
 | 2350 |                 HeapOperand(obj, offset + (Int64ConstantFrom(index) << 1))); | 
 | 2351 |         __ Bind(&done); | 
 | 2352 |       } else { | 
 | 2353 |         offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type); | 
 | 2354 |         source = HeapOperand(obj, offset); | 
 | 2355 |       } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2356 |     } else { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2357 |       Register temp = temps.AcquireSameSizeAs(obj); | 
| Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 2358 |       if (instruction->GetArray()->IsIntermediateAddress()) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2359 |         // We do not need to compute the intermediate address from the array: the | 
 | 2360 |         // input instruction has done it already. See the comment in | 
| Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 2361 |         // `TryExtractArrayAccessAddress()`. | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2362 |         if (kIsDebugBuild) { | 
| Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 2363 |           HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress(); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2364 |           DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset); | 
 | 2365 |         } | 
 | 2366 |         temp = obj; | 
 | 2367 |       } else { | 
 | 2368 |         __ Add(temp, obj, offset); | 
 | 2369 |       } | 
| jessicahandojo | 0576575 | 2016-09-09 19:01:32 -0700 | [diff] [blame] | 2370 |       if (maybe_compressed_char_at) { | 
 | 2371 |         vixl::aarch64::Label uncompressed_load, done; | 
 | 2372 |         __ Tbz(length.W(), kWRegSize - 1, &uncompressed_load); | 
 | 2373 |         __ Ldrb(Register(OutputCPURegister(instruction)), | 
 | 2374 |                 HeapOperand(temp, XRegisterFrom(index), LSL, 0)); | 
 | 2375 |         __ B(&done); | 
 | 2376 |         __ Bind(&uncompressed_load); | 
 | 2377 |         __ Ldrh(Register(OutputCPURegister(instruction)), | 
 | 2378 |                 HeapOperand(temp, XRegisterFrom(index), LSL, 1)); | 
 | 2379 |         __ Bind(&done); | 
 | 2380 |       } else { | 
 | 2381 |         source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type)); | 
 | 2382 |       } | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2383 |     } | 
| jessicahandojo | 0576575 | 2016-09-09 19:01:32 -0700 | [diff] [blame] | 2384 |     if (!maybe_compressed_char_at) { | 
 | 2385 |       codegen_->Load(type, OutputCPURegister(instruction), source); | 
 | 2386 |       codegen_->MaybeRecordImplicitNullCheck(instruction); | 
 | 2387 |     } | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 2388 |  | 
 | 2389 |     if (type == Primitive::kPrimNot) { | 
 | 2390 |       static_assert( | 
 | 2391 |           sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t), | 
 | 2392 |           "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes."); | 
 | 2393 |       Location obj_loc = locations->InAt(0); | 
 | 2394 |       if (index.IsConstant()) { | 
 | 2395 |         codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset); | 
 | 2396 |       } else { | 
 | 2397 |         codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index); | 
 | 2398 |       } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2399 |     } | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 2400 |   } | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2401 | } | 
 | 2402 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2403 | void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) { | 
 | 2404 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction); | 
 | 2405 |   locations->SetInAt(0, Location::RequiresRegister()); | 
| Alexandre Rames | fb4e5fa | 2014-11-06 12:41:16 +0000 | [diff] [blame] | 2406 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2407 | } | 
 | 2408 |  | 
 | 2409 | void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) { | 
| Vladimir Marko | dce016e | 2016-04-28 13:10:02 +0100 | [diff] [blame] | 2410 |   uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction); | 
| jessicahandojo | 0576575 | 2016-09-09 19:01:32 -0700 | [diff] [blame] | 2411 |   vixl::aarch64::Register out = OutputRegister(instruction); | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 2412 |   BlockPoolsScope block_pools(GetVIXLAssembler()); | 
| jessicahandojo | 0576575 | 2016-09-09 19:01:32 -0700 | [diff] [blame] | 2413 |   __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset)); | 
| Calin Juravle | 77520bc | 2015-01-12 18:45:46 +0000 | [diff] [blame] | 2414 |   codegen_->MaybeRecordImplicitNullCheck(instruction); | 
| jessicahandojo | 0576575 | 2016-09-09 19:01:32 -0700 | [diff] [blame] | 2415 |   // Mask out compression flag from String's array length. | 
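  // (Under this encoding a compressed five-character string stores
  // count == 0x80000005; the And below clears the flag bit and yields 5.)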
 | 2416 |   if (mirror::kUseStringCompression && instruction->IsStringLength()) { | 
 | 2417 |     __ And(out.W(), out.W(), Operand(static_cast<int32_t>(INT32_MAX))); | 
 | 2418 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2419 | } | 
 | 2420 |  | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2421 | void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2422 |   Primitive::Type value_type = instruction->GetComponentType(); | 
 | 2423 |  | 
 | 2424 |   bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck(); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2425 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary( | 
 | 2426 |       instruction, | 
| Vladimir Marko | 8d49fd7 | 2016-08-25 15:20:47 +0100 | [diff] [blame] | 2427 |       may_need_runtime_call_for_type_check ? | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2428 |           LocationSummary::kCallOnSlowPath : | 
 | 2429 |           LocationSummary::kNoCall); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2430 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 2431 |   locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); | 
| Alexandre Rames | be919d9 | 2016-08-23 18:33:36 +0100 | [diff] [blame] | 2432 |   if (IsConstantZeroBitPattern(instruction->InputAt(2))) { | 
 | 2433 |     locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant())); | 
 | 2434 |   } else if (Primitive::IsFloatingPointType(value_type)) { | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2435 |     locations->SetInAt(2, Location::RequiresFpuRegister()); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2436 |   } else { | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2437 |     locations->SetInAt(2, Location::RequiresRegister()); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2438 |   } | 
 | 2439 | } | 
 | 2440 |  | 
 | 2441 | void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) { | 
 | 2442 |   Primitive::Type value_type = instruction->GetComponentType(); | 
| Alexandre Rames | 97833a0 | 2015-04-16 15:07:12 +0100 | [diff] [blame] | 2443 |   LocationSummary* locations = instruction->GetLocations(); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2444 |   bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck(); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2445 |   bool needs_write_barrier = | 
 | 2446 |       CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue()); | 
| Alexandre Rames | 97833a0 | 2015-04-16 15:07:12 +0100 | [diff] [blame] | 2447 |  | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2448 |   Register array = InputRegisterAt(instruction, 0); | 
| Alexandre Rames | be919d9 | 2016-08-23 18:33:36 +0100 | [diff] [blame] | 2449 |   CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2450 |   CPURegister source = value; | 
 | 2451 |   Location index = locations->InAt(1); | 
 | 2452 |   size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value(); | 
 | 2453 |   MemOperand destination = HeapOperand(array); | 
 | 2454 |   MacroAssembler* masm = GetVIXLAssembler(); | 
 | 2455 |   BlockPoolsScope block_pools(masm); | 
 | 2456 |  | 
 | 2457 |   if (!needs_write_barrier) { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2458 |     DCHECK(!may_need_runtime_call_for_type_check); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2459 |     if (index.IsConstant()) { | 
 | 2460 |       offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type); | 
 | 2461 |       destination = HeapOperand(array, offset); | 
 | 2462 |     } else { | 
 | 2463 |       UseScratchRegisterScope temps(masm); | 
 | 2464 |       Register temp = temps.AcquireSameSizeAs(array); | 
| Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 2465 |       if (instruction->GetArray()->IsIntermediateAddress()) { | 
| Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 2466 |         // We do not need to compute the intermediate address from the array: the | 
 | 2467 |         // input instruction has done it already. See the comment in | 
| Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 2468 |         // `TryExtractArrayAccessAddress()`. | 
| Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 2469 |         if (kIsDebugBuild) { | 
| Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 2470 |           HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress(); | 
| Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 2471 |           DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset); | 
 | 2472 |         } | 
 | 2473 |         temp = array; | 
 | 2474 |       } else { | 
 | 2475 |         __ Add(temp, array, offset); | 
 | 2476 |       } | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2477 |       destination = HeapOperand(temp, | 
 | 2478 |                                 XRegisterFrom(index), | 
 | 2479 |                                 LSL, | 
 | 2480 |                                 Primitive::ComponentSizeShift(value_type)); | 
 | 2481 |     } | 
 | 2482 |     codegen_->Store(value_type, value, destination); | 
 | 2483 |     codegen_->MaybeRecordImplicitNullCheck(instruction); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2484 |   } else { | 
| Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 2485 |     DCHECK(!instruction->GetArray()->IsIntermediateAddress()); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 2486 |     vixl::aarch64::Label done; | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2487 |     SlowPathCodeARM64* slow_path = nullptr; | 
| Alexandre Rames | 97833a0 | 2015-04-16 15:07:12 +0100 | [diff] [blame] | 2488 |     { | 
 | 2489 |       // We use a block to end the scratch scope before the write barrier, thus | 
 | 2490 |       // freeing the temporary registers so they can be used in `MarkGCCard`. | 
 | 2491 |       UseScratchRegisterScope temps(masm); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2492 |       Register temp = temps.AcquireSameSizeAs(array); | 
| Alexandre Rames | 97833a0 | 2015-04-16 15:07:12 +0100 | [diff] [blame] | 2493 |       if (index.IsConstant()) { | 
 | 2494 |         offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2495 |         destination = HeapOperand(array, offset); | 
| Alexandre Rames | 97833a0 | 2015-04-16 15:07:12 +0100 | [diff] [blame] | 2496 |       } else { | 
| Alexandre Rames | 82000b0 | 2015-07-07 11:34:16 +0100 | [diff] [blame] | 2497 |         destination = HeapOperand(temp, | 
 | 2498 |                                   XRegisterFrom(index), | 
 | 2499 |                                   LSL, | 
 | 2500 |                                   Primitive::ComponentSizeShift(value_type)); | 
| Alexandre Rames | 97833a0 | 2015-04-16 15:07:12 +0100 | [diff] [blame] | 2501 |       } | 
 | 2502 |  | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2503 |       uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); | 
 | 2504 |       uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value(); | 
 | 2505 |       uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value(); | 
 | 2506 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2507 |       if (may_need_runtime_call_for_type_check) { | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2508 |         slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction); | 
 | 2509 |         codegen_->AddSlowPath(slow_path); | 
 | 2510 |         if (instruction->GetValueCanBeNull()) { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 2511 |           vixl::aarch64::Label non_zero; | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2512 |           __ Cbnz(Register(value), &non_zero); | 
 | 2513 |           if (!index.IsConstant()) { | 
 | 2514 |             __ Add(temp, array, offset); | 
 | 2515 |           } | 
 | 2516 |           __ Str(wzr, destination); | 
 | 2517 |           codegen_->MaybeRecordImplicitNullCheck(instruction); | 
 | 2518 |           __ B(&done); | 
 | 2519 |           __ Bind(&non_zero); | 
 | 2520 |         } | 
 | 2521 |  | 
| Roland Levillain | 9d6e1f8 | 2016-09-05 15:57:33 +0100 | [diff] [blame] | 2522 |         // Note that when Baker read barriers are enabled, the type | 
 | 2523 |         // checks are performed without read barriers.  This is fine, | 
 | 2524 |         // even in the case where a class object is in the from-space | 
 | 2525 |         // after the flip, as a comparison involving such a type would | 
 | 2526 |         // not produce a false positive; it may of course produce a | 
 | 2527 |         // false negative, in which case we would take the ArraySet | 
 | 2528 |         // slow path. | 
| Roland Levillain | 16d9f94 | 2016-08-25 17:27:56 +0100 | [diff] [blame] | 2529 |  | 
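        // The sequence emitted below is essentially:
        //
        //   if (array->klass_->component_type_ != value->klass_) {
        //     if (!<static type is Object[]> ||
        //         array->klass_->component_type_->super_class_ != null) {
        //       goto slow_path;  // full check done out of line
        //     }
        //   }
        //
        // i.e. the fast path accepts a store whose value's class exactly
        // matches the array's component type, or whose array component type
        // is java.lang.Object.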
| Roland Levillain | 9d6e1f8 | 2016-09-05 15:57:33 +0100 | [diff] [blame] | 2530 |         Register temp2 = temps.AcquireSameSizeAs(array); | 
 | 2531 |         // /* HeapReference<Class> */ temp = array->klass_ | 
 | 2532 |         __ Ldr(temp, HeapOperand(array, class_offset)); | 
 | 2533 |         codegen_->MaybeRecordImplicitNullCheck(instruction); | 
 | 2534 |         GetAssembler()->MaybeUnpoisonHeapReference(temp); | 
| Roland Levillain | 16d9f94 | 2016-08-25 17:27:56 +0100 | [diff] [blame] | 2535 |  | 
| Roland Levillain | 9d6e1f8 | 2016-09-05 15:57:33 +0100 | [diff] [blame] | 2536 |         // /* HeapReference<Class> */ temp = temp->component_type_ | 
 | 2537 |         __ Ldr(temp, HeapOperand(temp, component_offset)); | 
 | 2538 |         // /* HeapReference<Class> */ temp2 = value->klass_ | 
 | 2539 |         __ Ldr(temp2, HeapOperand(Register(value), class_offset)); | 
 | 2540 |         // If heap poisoning is enabled, no need to unpoison `temp` | 
 | 2541 |         // or `temp2`, as we are comparing two poisoned references. | 
 | 2542 |         __ Cmp(temp, temp2); | 
 | 2543 |         temps.Release(temp2); | 
| Roland Levillain | 16d9f94 | 2016-08-25 17:27:56 +0100 | [diff] [blame] | 2544 |  | 
| Roland Levillain | 9d6e1f8 | 2016-09-05 15:57:33 +0100 | [diff] [blame] | 2545 |         if (instruction->StaticTypeOfArrayIsObjectArray()) { | 
 | 2546 |           vixl::aarch64::Label do_put; | 
 | 2547 |           __ B(eq, &do_put); | 
 | 2548 |           // If heap poisoning is enabled, the `temp` reference has | 
 | 2549 |           // not been unpoisoned yet; unpoison it now. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2550 |           GetAssembler()->MaybeUnpoisonHeapReference(temp); | 
 | 2551 |  | 
| Roland Levillain | 9d6e1f8 | 2016-09-05 15:57:33 +0100 | [diff] [blame] | 2552 |           // /* HeapReference<Class> */ temp = temp->super_class_ | 
 | 2553 |           __ Ldr(temp, HeapOperand(temp, super_offset)); | 
 | 2554 |           // If heap poisoning is enabled, no need to unpoison | 
 | 2555 |           // `temp`, as we are comparing against null below. | 
 | 2556 |           __ Cbnz(temp, slow_path->GetEntryLabel()); | 
 | 2557 |           __ Bind(&do_put); | 
 | 2558 |         } else { | 
 | 2559 |           __ B(ne, slow_path->GetEntryLabel()); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2560 |         } | 
 | 2561 |       } | 
 | 2562 |  | 
 | 2563 |       if (kPoisonHeapReferences) { | 
| Nicolas Geoffray | a8a0fe2 | 2015-10-01 15:50:27 +0100 | [diff] [blame] | 2564 |         Register temp2 = temps.AcquireSameSizeAs(array); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2565 |         DCHECK(value.IsW()); | 
| Nicolas Geoffray | a8a0fe2 | 2015-10-01 15:50:27 +0100 | [diff] [blame] | 2566 |         __ Mov(temp2, value.W()); | 
 | 2567 |         GetAssembler()->PoisonHeapReference(temp2); | 
 | 2568 |         source = temp2; | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2569 |       } | 
 | 2570 |  | 
 | 2571 |       if (!index.IsConstant()) { | 
 | 2572 |         __ Add(temp, array, offset); | 
 | 2573 |       } | 
| Nicolas Geoffray | 61b1dbe | 2015-10-01 10:27:52 +0100 | [diff] [blame] | 2574 |       __ Str(source, destination); | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2575 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 2576 |       if (!may_need_runtime_call_for_type_check) { | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2577 |         codegen_->MaybeRecordImplicitNullCheck(instruction); | 
 | 2578 |       } | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2579 |     } | 
| Nicolas Geoffray | e0395dd | 2015-09-25 11:04:45 +0100 | [diff] [blame] | 2580 |  | 
 | 2581 |     codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull()); | 
 | 2582 |  | 
 | 2583 |     if (done.IsLinked()) { | 
 | 2584 |       __ Bind(&done); | 
 | 2585 |     } | 
 | 2586 |  | 
 | 2587 |     if (slow_path != nullptr) { | 
 | 2588 |       __ Bind(slow_path->GetExitLabel()); | 
| Alexandre Rames | 97833a0 | 2015-04-16 15:07:12 +0100 | [diff] [blame] | 2589 |     } | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2590 |   } | 
 | 2591 | } | 
 | 2592 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2593 | void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) { | 
| Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 2594 |   RegisterSet caller_saves = RegisterSet::Empty(); | 
 | 2595 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 2596 |   caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode())); | 
 | 2597 |   caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode())); | 
 | 2598 |   LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2599 |   locations->SetInAt(0, Location::RequiresRegister()); | 
| Serban Constantinescu | 760d8ef | 2015-03-28 18:09:56 +0000 | [diff] [blame] | 2600 |   locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction)); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2601 | } | 
 | 2602 |  | 
 | 2603 | void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) { | 
| Serban Constantinescu | 5a6cc49 | 2015-08-13 15:20:25 +0100 | [diff] [blame] | 2604 |   BoundsCheckSlowPathARM64* slow_path = | 
 | 2605 |       new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2606 |   codegen_->AddSlowPath(slow_path); | 
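  // A single unsigned comparison covers both failure modes: a negative index
  // is seen as a very large unsigned value, so branching on `hs` (unsigned >=)
  // catches `index < 0` as well as `index >= length`.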
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2607 |   __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1)); | 
 | 2608 |   __ B(slow_path->GetEntryLabel(), hs); | 
 | 2609 | } | 
 | 2610 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2611 | void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) { | 
 | 2612 |   LocationSummary* locations = | 
 | 2613 |       new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath); | 
 | 2614 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 2615 |   if (check->HasUses()) { | 
 | 2616 |     locations->SetOut(Location::SameAsFirstInput()); | 
 | 2617 |   } | 
 | 2618 | } | 
 | 2619 |  | 
 | 2620 | void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) { | 
 | 2621 |   // We assume the class is not null. | 
 | 2622 |   SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64( | 
 | 2623 |       check->GetLoadClass(), check, check->GetDexPc(), true); | 
 | 2624 |   codegen_->AddSlowPath(slow_path); | 
 | 2625 |   GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0)); | 
 | 2626 | } | 
 | 2627 |  | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 2628 | static bool IsFloatingPointZeroConstant(HInstruction* inst) { | 
 | 2629 |   return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero())) | 
 | 2630 |       || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero())); | 
 | 2631 | } | 
 | 2632 |  | 
 | 2633 | void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) { | 
 | 2634 |   FPRegister lhs_reg = InputFPRegisterAt(instruction, 0); | 
 | 2635 |   Location rhs_loc = instruction->GetLocations()->InAt(1); | 
 | 2636 |   if (rhs_loc.IsConstant()) { | 
 | 2637 |     // 0.0 is the only immediate that can be encoded directly in | 
 | 2638 |     // an FCMP instruction. | 
 | 2639 |     // | 
 | 2640 |     // Both the JLS (section 15.20.1) and the JVMS (section 6.5) | 
 | 2641 |     // specify that in a floating-point comparison, positive zero | 
 | 2642 |     // and negative zero are considered equal, so we can use the | 
 | 2643 |     // literal 0.0 for both cases here. | 
 | 2644 |     // | 
 | 2645 |     // Note however that some methods (Float.equal, Float.compare, | 
 | 2646 |     // Float.compareTo, Double.equal, Double.compare, | 
 | 2647 |     // Double.compareTo, Math.max, Math.min, StrictMath.max, | 
 | 2648 |     // StrictMath.min) consider 0.0 to be (strictly) greater than | 
 | 2649 |     // -0.0. So if we ever translate calls to these methods into a | 
 | 2650 |     // HCompare instruction, we must handle the -0.0 case with | 
 | 2651 |     // care here. | 
 | 2652 |     DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant())); | 
 | 2653 |     __ Fcmp(lhs_reg, 0.0); | 
 | 2654 |   } else { | 
 | 2655 |     __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1)); | 
 | 2656 |   } | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 2657 | } | 
 | 2658 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2659 | void LocationsBuilderARM64::VisitCompare(HCompare* compare) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2660 |   LocationSummary* locations = | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2661 |       new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); | 
 | 2662 |   Primitive::Type in_type = compare->InputAt(0)->GetType(); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2663 |   switch (in_type) { | 
| Roland Levillain | a5c4a40 | 2016-03-15 15:02:50 +0000 | [diff] [blame] | 2664 |     case Primitive::kPrimBoolean: | 
 | 2665 |     case Primitive::kPrimByte: | 
 | 2666 |     case Primitive::kPrimShort: | 
 | 2667 |     case Primitive::kPrimChar: | 
| Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2668 |     case Primitive::kPrimInt: | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2669 |     case Primitive::kPrimLong: { | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2670 |       locations->SetInAt(0, Location::RequiresRegister()); | 
| Serban Constantinescu | 2d35d9d | 2015-02-22 22:08:01 +0000 | [diff] [blame] | 2671 |       locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare)); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2672 |       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 2673 |       break; | 
 | 2674 |     } | 
 | 2675 |     case Primitive::kPrimFloat: | 
 | 2676 |     case Primitive::kPrimDouble: { | 
 | 2677 |       locations->SetInAt(0, Location::RequiresFpuRegister()); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 2678 |       locations->SetInAt(1, | 
 | 2679 |                          IsFloatingPointZeroConstant(compare->InputAt(1)) | 
 | 2680 |                              ? Location::ConstantLocation(compare->InputAt(1)->AsConstant()) | 
 | 2681 |                              : Location::RequiresFpuRegister()); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2682 |       locations->SetOut(Location::RequiresRegister()); | 
 | 2683 |       break; | 
 | 2684 |     } | 
 | 2685 |     default: | 
 | 2686 |       LOG(FATAL) << "Unexpected type for compare operation " << in_type; | 
 | 2687 |   } | 
 | 2688 | } | 
 | 2689 |  | 
 | 2690 | void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) { | 
 | 2691 |   Primitive::Type in_type = compare->InputAt(0)->GetType(); | 
 | 2692 |  | 
 | 2693 |   //  0 if: left == right | 
 | 2694 |   //  1 if: left  > right | 
 | 2695 |   // -1 if: left  < right | 
 | 2696 |   switch (in_type) { | 
| Roland Levillain | a5c4a40 | 2016-03-15 15:02:50 +0000 | [diff] [blame] | 2697 |     case Primitive::kPrimBoolean: | 
 | 2698 |     case Primitive::kPrimByte: | 
 | 2699 |     case Primitive::kPrimShort: | 
 | 2700 |     case Primitive::kPrimChar: | 
| Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2701 |     case Primitive::kPrimInt: | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2702 |     case Primitive::kPrimLong: { | 
 | 2703 |       Register result = OutputRegister(compare); | 
 | 2704 |       Register left = InputRegisterAt(compare, 0); | 
 | 2705 |       Operand right = InputOperandAt(compare, 1); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2706 |       __ Cmp(left, right); | 
| Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2707 |       __ Cset(result, ne);          // result == +1 if NE or 0 otherwise | 
 | 2708 |       __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise | 
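      // For example, left == 3 and right == 7 sets LT: Cset produces 1 (the
      // operands differ) and Cneg flips it to -1; equal operands leave 0, and
      // left > right keeps the +1.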
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 2709 |       break; | 
 | 2710 |     } | 
 | 2711 |     case Primitive::kPrimFloat: | 
 | 2712 |     case Primitive::kPrimDouble: { | 
 | 2713 |       Register result = OutputRegister(compare); | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 2714 |       GenerateFcmp(compare); | 
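      // As in the integer case this materializes -1/0/+1; the bias-aware
      // condition from ARM64FPCondition decides how an unordered (NaN)
      // comparison is treated: +1 with gt bias, -1 otherwise.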
| Vladimir Marko | d6e069b | 2016-01-18 11:11:01 +0000 | [diff] [blame] | 2715 |       __ Cset(result, ne); | 
 | 2716 |       __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias())); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2717 |       break; | 
 | 2718 |     } | 
 | 2719 |     default: | 
 | 2720 |       LOG(FATAL) << "Unimplemented compare type " << in_type; | 
 | 2721 |   } | 
 | 2722 | } | 
 | 2723 |  | 
| Vladimir Marko | 5f7b58e | 2015-11-23 19:49:34 +0000 | [diff] [blame] | 2724 | void LocationsBuilderARM64::HandleCondition(HCondition* instruction) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2725 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 2726 |  | 
 | 2727 |   if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) { | 
 | 2728 |     locations->SetInAt(0, Location::RequiresFpuRegister()); | 
 | 2729 |     locations->SetInAt(1, | 
 | 2730 |                        IsFloatingPointZeroConstant(instruction->InputAt(1)) | 
 | 2731 |                            ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant()) | 
 | 2732 |                            : Location::RequiresFpuRegister()); | 
 | 2733 |   } else { | 
 | 2734 |     // Integer cases. | 
 | 2735 |     locations->SetInAt(0, Location::RequiresRegister()); | 
 | 2736 |     locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction)); | 
 | 2737 |   } | 
 | 2738 |  | 
| David Brazdil | b3e773e | 2016-01-26 11:28:37 +0000 | [diff] [blame] | 2739 |   if (!instruction->IsEmittedAtUseSite()) { | 
| Alexandre Rames | fb4e5fa | 2014-11-06 12:41:16 +0000 | [diff] [blame] | 2740 |     locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2741 |   } | 
 | 2742 | } | 
 | 2743 |  | 
| Vladimir Marko | 5f7b58e | 2015-11-23 19:49:34 +0000 | [diff] [blame] | 2744 | void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) { | 
| David Brazdil | b3e773e | 2016-01-26 11:28:37 +0000 | [diff] [blame] | 2745 |   if (instruction->IsEmittedAtUseSite()) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2746 |     return; | 
 | 2747 |   } | 
 | 2748 |  | 
 | 2749 |   LocationSummary* locations = instruction->GetLocations(); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2750 |   Register res = RegisterFrom(locations->Out(), instruction->GetType()); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 2751 |   IfCondition if_cond = instruction->GetCondition(); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2752 |  | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 2753 |   if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) { | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 2754 |     GenerateFcmp(instruction); | 
| Vladimir Marko | d6e069b | 2016-01-18 11:11:01 +0000 | [diff] [blame] | 2755 |     __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias())); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 2756 |   } else { | 
 | 2757 |     // Integer cases. | 
 | 2758 |     Register lhs = InputRegisterAt(instruction, 0); | 
 | 2759 |     Operand rhs = InputOperandAt(instruction, 1); | 
 | 2760 |     __ Cmp(lhs, rhs); | 
| Vladimir Marko | d6e069b | 2016-01-18 11:11:01 +0000 | [diff] [blame] | 2761 |     __ Cset(res, ARM64Condition(if_cond)); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 2762 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2763 | } | 
 | 2764 |  | 
 | 2765 | #define FOR_EACH_CONDITION_INSTRUCTION(M)                                                \ | 
 | 2766 |   M(Equal)                                                                               \ | 
 | 2767 |   M(NotEqual)                                                                            \ | 
 | 2768 |   M(LessThan)                                                                            \ | 
 | 2769 |   M(LessThanOrEqual)                                                                     \ | 
 | 2770 |   M(GreaterThan)                                                                         \ | 
| Aart Bik | e9f3760 | 2015-10-09 11:15:55 -0700 | [diff] [blame] | 2771 |   M(GreaterThanOrEqual)                                                                  \ | 
 | 2772 |   M(Below)                                                                               \ | 
 | 2773 |   M(BelowOrEqual)                                                                        \ | 
 | 2774 |   M(Above)                                                                               \ | 
 | 2775 |   M(AboveOrEqual) | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2776 | #define DEFINE_CONDITION_VISITORS(Name)                                                  \ | 
| Vladimir Marko | 5f7b58e | 2015-11-23 19:49:34 +0000 | [diff] [blame] | 2777 | void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }         \ | 
 | 2778 | void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2779 | FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS) | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2780 | #undef DEFINE_CONDITION_VISITORS | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 2781 | #undef FOR_EACH_CONDITION_INSTRUCTION | 
 | 2782 |  | 
| Zheng Xu | c666710 | 2015-05-15 16:08:45 +0800 | [diff] [blame] | 2783 | void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) { | 
 | 2784 |   DCHECK(instruction->IsDiv() || instruction->IsRem()); | 
 | 2785 |  | 
 | 2786 |   LocationSummary* locations = instruction->GetLocations(); | 
 | 2787 |   Location second = locations->InAt(1); | 
 | 2788 |   DCHECK(second.IsConstant()); | 
 | 2789 |  | 
 | 2790 |   Register out = OutputRegister(instruction); | 
 | 2791 |   Register dividend = InputRegisterAt(instruction, 0); | 
 | 2792 |   int64_t imm = Int64FromConstant(second.GetConstant()); | 
 | 2793 |   DCHECK(imm == 1 || imm == -1); | 
 | 2794 |  | 
 | 2795 |   if (instruction->IsRem()) { | 
 | 2796 |     __ Mov(out, 0); | 
 | 2797 |   } else { | 
 | 2798 |     if (imm == 1) { | 
 | 2799 |       __ Mov(out, dividend); | 
 | 2800 |     } else { | 
 | 2801 |       __ Neg(out, dividend); | 
 | 2802 |     } | 
 | 2803 |   } | 
 | 2804 | } | 
 | 2805 |  | 
 | 2806 | void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) { | 
 | 2807 |   DCHECK(instruction->IsDiv() || instruction->IsRem()); | 
 | 2808 |  | 
 | 2809 |   LocationSummary* locations = instruction->GetLocations(); | 
 | 2810 |   Location second = locations->InAt(1); | 
 | 2811 |   DCHECK(second.IsConstant()); | 
 | 2812 |  | 
 | 2813 |   Register out = OutputRegister(instruction); | 
 | 2814 |   Register dividend = InputRegisterAt(instruction, 0); | 
 | 2815 |   int64_t imm = Int64FromConstant(second.GetConstant()); | 
| Nicolas Geoffray | 68f6289 | 2016-01-04 08:39:49 +0000 | [diff] [blame] | 2816 |   uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm)); | 
| Zheng Xu | c666710 | 2015-05-15 16:08:45 +0800 | [diff] [blame] | 2817 |   int ctz_imm = CTZ(abs_imm); | 
 | 2818 |  | 
 | 2819 |   UseScratchRegisterScope temps(GetVIXLAssembler()); | 
 | 2820 |   Register temp = temps.AcquireSameSizeAs(out); | 
 | 2821 |  | 
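  // A worked example, taking imm == 8 (so ctz_imm == 3) and dividend == -5:
  // - Div:  temp = -5 + 7 = 2; dividend < 0 so Csel picks temp, and 2 >> 3 == 0,
  //         matching Java's round-toward-zero result for -5 / 8.
  // - Rem:  temp = (-5 >> 31) = -1, then (unsigned) -1 >> 29 == 7 (the bias);
  //         ((-5 + 7) & 7) - 7 == -5, matching -5 % 8 == -5.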
 | 2822 |   if (instruction->IsDiv()) { | 
 | 2823 |     __ Add(temp, dividend, abs_imm - 1); | 
 | 2824 |     __ Cmp(dividend, 0); | 
 | 2825 |     __ Csel(out, temp, dividend, lt); | 
 | 2826 |     if (imm > 0) { | 
 | 2827 |       __ Asr(out, out, ctz_imm); | 
 | 2828 |     } else { | 
 | 2829 |       __ Neg(out, Operand(out, ASR, ctz_imm)); | 
 | 2830 |     } | 
 | 2831 |   } else { | 
 | 2832 |     int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64; | 
 | 2833 |     __ Asr(temp, dividend, bits - 1); | 
 | 2834 |     __ Lsr(temp, temp, bits - ctz_imm); | 
 | 2835 |     __ Add(out, dividend, temp); | 
 | 2836 |     __ And(out, out, abs_imm - 1); | 
 | 2837 |     __ Sub(out, out, temp); | 
 | 2838 |   } | 
 | 2839 | } | 
 | 2840 |  | 
 | 2841 | void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) { | 
 | 2842 |   DCHECK(instruction->IsDiv() || instruction->IsRem()); | 
 | 2843 |  | 
 | 2844 |   LocationSummary* locations = instruction->GetLocations(); | 
 | 2845 |   Location second = locations->InAt(1); | 
 | 2846 |   DCHECK(second.IsConstant()); | 
 | 2847 |  | 
 | 2848 |   Register out = OutputRegister(instruction); | 
 | 2849 |   Register dividend = InputRegisterAt(instruction, 0); | 
 | 2850 |   int64_t imm = Int64FromConstant(second.GetConstant()); | 
 | 2851 |  | 
 | 2852 |   Primitive::Type type = instruction->GetResultType(); | 
 | 2853 |   DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong); | 
 | 2854 |  | 
 | 2855 |   int64_t magic; | 
 | 2856 |   int shift; | 
 | 2857 |   CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift); | 
 | 2858 |  | 
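  // This is the standard "magic number" technique for division by an arbitrary
  // constant (cf. Hacker's Delight, integer division by constants): multiply by
  // a precomputed reciprocal-like constant, keep the high half of the widened
  // product, apply a +/- dividend correction when the magic constant wrapped
  // into the sign bit, shift, and finally add the sign bit of the intermediate
  // result so the quotient rounds toward zero as Java requires.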
 | 2859 |   UseScratchRegisterScope temps(GetVIXLAssembler()); | 
 | 2860 |   Register temp = temps.AcquireSameSizeAs(out); | 
 | 2861 |  | 
 | 2862 |   // temp = get_high(dividend * magic) | 
 | 2863 |   __ Mov(temp, magic); | 
 | 2864 |   if (type == Primitive::kPrimLong) { | 
 | 2865 |     __ Smulh(temp, dividend, temp); | 
 | 2866 |   } else { | 
 | 2867 |     __ Smull(temp.X(), dividend, temp); | 
 | 2868 |     __ Lsr(temp.X(), temp.X(), 32); | 
 | 2869 |   } | 
 | 2870 |  | 
 | 2871 |   if (imm > 0 && magic < 0) { | 
 | 2872 |     __ Add(temp, temp, dividend); | 
 | 2873 |   } else if (imm < 0 && magic > 0) { | 
 | 2874 |     __ Sub(temp, temp, dividend); | 
 | 2875 |   } | 
 | 2876 |  | 
 | 2877 |   if (shift != 0) { | 
 | 2878 |     __ Asr(temp, temp, shift); | 
 | 2879 |   } | 
 | 2880 |  | 
 | 2881 |   if (instruction->IsDiv()) { | 
 | 2882 |     __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31)); | 
 | 2883 |   } else { | 
 | 2884 |     __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31)); | 
 | 2885 |     // TODO: Strength reduction for msub. | 
 | 2886 |     Register temp_imm = temps.AcquireSameSizeAs(out); | 
 | 2887 |     __ Mov(temp_imm, imm); | 
 | 2888 |     __ Msub(out, temp, temp_imm, dividend); | 
 | 2889 |   } | 
 | 2890 | } | 
 | 2891 |  | 
 | 2892 | void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) { | 
 | 2893 |   DCHECK(instruction->IsDiv() || instruction->IsRem()); | 
 | 2894 |   Primitive::Type type = instruction->GetResultType(); | 
 | 2895 |   DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong); | 
 | 2896 |  | 
 | 2897 |   LocationSummary* locations = instruction->GetLocations(); | 
 | 2898 |   Register out = OutputRegister(instruction); | 
 | 2899 |   Location second = locations->InAt(1); | 
 | 2900 |  | 
 | 2901 |   if (second.IsConstant()) { | 
 | 2902 |     int64_t imm = Int64FromConstant(second.GetConstant()); | 
 | 2903 |  | 
 | 2904 |     if (imm == 0) { | 
 | 2905 |       // Do not generate anything. DivZeroCheck would prevent any code from being executed. | 
 | 2906 |     } else if (imm == 1 || imm == -1) { | 
 | 2907 |       DivRemOneOrMinusOne(instruction); | 
| Nicolas Geoffray | 68f6289 | 2016-01-04 08:39:49 +0000 | [diff] [blame] | 2908 |     } else if (IsPowerOfTwo(AbsOrMin(imm))) { | 
| Zheng Xu | c666710 | 2015-05-15 16:08:45 +0800 | [diff] [blame] | 2909 |       DivRemByPowerOfTwo(instruction); | 
 | 2910 |     } else { | 
 | 2911 |       DCHECK(imm <= -2 || imm >= 2); | 
 | 2912 |       GenerateDivRemWithAnyConstant(instruction); | 
 | 2913 |     } | 
 | 2914 |   } else { | 
 | 2915 |     Register dividend = InputRegisterAt(instruction, 0); | 
 | 2916 |     Register divisor = InputRegisterAt(instruction, 1); | 
 | 2917 |     if (instruction->IsDiv()) { | 
 | 2918 |       __ Sdiv(out, dividend, divisor); | 
 | 2919 |     } else { | 
 | 2920 |       UseScratchRegisterScope temps(GetVIXLAssembler()); | 
 | 2921 |       Register temp = temps.AcquireSameSizeAs(out); | 
 | 2922 |       __ Sdiv(temp, dividend, divisor); | 
 | 2923 |       __ Msub(out, temp, divisor, dividend); | 
 | 2924 |     } | 
 | 2925 |   } | 
 | 2926 | } | 
 | 2927 |  | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2928 | void LocationsBuilderARM64::VisitDiv(HDiv* div) { | 
 | 2929 |   LocationSummary* locations = | 
 | 2930 |       new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall); | 
 | 2931 |   switch (div->GetResultType()) { | 
 | 2932 |     case Primitive::kPrimInt: | 
 | 2933 |     case Primitive::kPrimLong: | 
 | 2934 |       locations->SetInAt(0, Location::RequiresRegister()); | 
| Zheng Xu | c666710 | 2015-05-15 16:08:45 +0800 | [diff] [blame] | 2935 |       locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1))); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2936 |       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 2937 |       break; | 
 | 2938 |  | 
 | 2939 |     case Primitive::kPrimFloat: | 
 | 2940 |     case Primitive::kPrimDouble: | 
 | 2941 |       locations->SetInAt(0, Location::RequiresFpuRegister()); | 
 | 2942 |       locations->SetInAt(1, Location::RequiresFpuRegister()); | 
 | 2943 |       locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); | 
 | 2944 |       break; | 
 | 2945 |  | 
 | 2946 |     default: | 
 | 2947 |       LOG(FATAL) << "Unexpected div type " << div->GetResultType(); | 
 | 2948 |   } | 
 | 2949 | } | 
 | 2950 |  | 
 | 2951 | void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) { | 
 | 2952 |   Primitive::Type type = div->GetResultType(); | 
 | 2953 |   switch (type) { | 
 | 2954 |     case Primitive::kPrimInt: | 
 | 2955 |     case Primitive::kPrimLong: | 
| Zheng Xu | c666710 | 2015-05-15 16:08:45 +0800 | [diff] [blame] | 2956 |       GenerateDivRemIntegral(div); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 2957 |       break; | 
 | 2958 |  | 
 | 2959 |     case Primitive::kPrimFloat: | 
 | 2960 |     case Primitive::kPrimDouble: | 
 | 2961 |       __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1)); | 
 | 2962 |       break; | 
 | 2963 |  | 
 | 2964 |     default: | 
 | 2965 |       LOG(FATAL) << "Unexpected div type " << type; | 
 | 2966 |   } | 
 | 2967 | } | 
 | 2968 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2969 | void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) { | 
| Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 2970 |   LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2971 |   locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0))); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2972 | } | 
 | 2973 |  | 
 | 2974 | void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) { | 
 | 2975 |   SlowPathCodeARM64* slow_path = | 
 | 2976 |       new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction); | 
 | 2977 |   codegen_->AddSlowPath(slow_path); | 
 | 2978 |   Location value = instruction->GetLocations()->InAt(0); | 
 | 2979 |  | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 2980 |   Primitive::Type type = instruction->GetType(); | 
 | 2981 |  | 
| Nicolas Geoffray | e567161 | 2016-03-16 11:03:54 +0000 | [diff] [blame] | 2982 |   if (!Primitive::IsIntegralType(type)) { | 
 | 2983 |     LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck."; | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 2984 |     return; | 
 | 2985 |   } | 
 | 2986 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2987 |   if (value.IsConstant()) { | 
 | 2988 |     int64_t divisor = Int64ConstantFrom(value); | 
 | 2989 |     if (divisor == 0) { | 
 | 2990 |       __ B(slow_path->GetEntryLabel()); | 
 | 2991 |     } else { | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 2992 |       // A division by a non-zero constant is valid. We don't need to perform | 
 | 2993 |       // any check, so simply fall through. | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 2994 |     } | 
 | 2995 |   } else { | 
 | 2996 |     __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel()); | 
 | 2997 |   } | 
 | 2998 | } | 
 | 2999 |  | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 3000 | void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) { | 
 | 3001 |   LocationSummary* locations = | 
 | 3002 |       new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); | 
 | 3003 |   locations->SetOut(Location::ConstantLocation(constant)); | 
 | 3004 | } | 
 | 3005 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 3006 | void InstructionCodeGeneratorARM64::VisitDoubleConstant( | 
 | 3007 |     HDoubleConstant* constant ATTRIBUTE_UNUSED) { | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 3008 |   // Will be generated at use site. | 
 | 3009 | } | 
 | 3010 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3011 | void LocationsBuilderARM64::VisitExit(HExit* exit) { | 
 | 3012 |   exit->SetLocations(nullptr); | 
 | 3013 | } | 
 | 3014 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 3015 | void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3016 | } | 
 | 3017 |  | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 3018 | void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) { | 
 | 3019 |   LocationSummary* locations = | 
 | 3020 |       new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); | 
 | 3021 |   locations->SetOut(Location::ConstantLocation(constant)); | 
 | 3022 | } | 
 | 3023 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 3024 | void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) { | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 3025 |   // Will be generated at use site. | 
 | 3026 | } | 
 | 3027 |  | 
| David Brazdil | fc6a86a | 2015-06-26 10:33:45 +0000 | [diff] [blame] | 3028 | void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) { | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 3029 |   DCHECK(!successor->IsExitBlock()); | 
 | 3030 |   HBasicBlock* block = got->GetBlock(); | 
 | 3031 |   HInstruction* previous = got->GetPrevious(); | 
 | 3032 |   HLoopInformation* info = block->GetLoopInformation(); | 
 | 3033 |  | 
| David Brazdil | 46e2a39 | 2015-03-16 17:31:52 +0000 | [diff] [blame] | 3034 |   if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) { | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 3035 |     codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck()); | 
 | 3036 |     GenerateSuspendCheck(info->GetSuspendCheck(), successor); | 
 | 3037 |     return; | 
 | 3038 |   } | 
 | 3039 |   if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) { | 
 | 3040 |     GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr); | 
 | 3041 |   } | 
 | 3042 |   if (!codegen_->GoesToNextBlock(block, successor)) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3043 |     __ B(codegen_->GetLabelOf(successor)); | 
 | 3044 |   } | 
 | 3045 | } | 
 | 3046 |  | 
| David Brazdil | fc6a86a | 2015-06-26 10:33:45 +0000 | [diff] [blame] | 3047 | void LocationsBuilderARM64::VisitGoto(HGoto* got) { | 
 | 3048 |   got->SetLocations(nullptr); | 
 | 3049 | } | 
 | 3050 |  | 
 | 3051 | void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) { | 
 | 3052 |   HandleGoto(got, got->GetSuccessor()); | 
 | 3053 | } | 
 | 3054 |  | 
 | 3055 | void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) { | 
 | 3056 |   try_boundary->SetLocations(nullptr); | 
 | 3057 | } | 
 | 3058 |  | 
 | 3059 | void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) { | 
 | 3060 |   HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor(); | 
 | 3061 |   if (!successor->IsExitBlock()) { | 
 | 3062 |     HandleGoto(try_boundary, successor); | 
 | 3063 |   } | 
 | 3064 | } | 
 | 3065 |  | 
| Mingyao Yang | d43b3ac | 2015-04-01 14:03:04 -0700 | [diff] [blame] | 3066 | void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction, | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3067 |                                                           size_t condition_input_index, | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3068 |                                                           vixl::aarch64::Label* true_target, | 
 | 3069 |                                                           vixl::aarch64::Label* false_target) { | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3070 |   // FP branching requires both targets to be explicit. If either of the targets | 
 | 3071 |   // is nullptr (fallthrough), use and bind `fallthrough_target` instead. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3072 |   vixl::aarch64::Label fallthrough_target; | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3073 |   HInstruction* cond = instruction->InputAt(condition_input_index); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3074 |  | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3075 |   if (true_target == nullptr && false_target == nullptr) { | 
 | 3076 |     // Nothing to do. The code always falls through. | 
 | 3077 |     return; | 
 | 3078 |   } else if (cond->IsIntConstant()) { | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 3079 |     // Constant condition, statically compared against "true" (integer value 1). | 
 | 3080 |     if (cond->AsIntConstant()->IsTrue()) { | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3081 |       if (true_target != nullptr) { | 
 | 3082 |         __ B(true_target); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 3083 |       } | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 3084 |     } else { | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 3085 |       DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue(); | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3086 |       if (false_target != nullptr) { | 
 | 3087 |         __ B(false_target); | 
 | 3088 |       } | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 3089 |     } | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3090 |     return; | 
 | 3091 |   } | 
 | 3092 |  | 
 | 3093 |   // The following code generates these patterns: | 
 | 3094 |   //  (1) true_target == nullptr && false_target != nullptr | 
 | 3095 |   //        - opposite condition true => branch to false_target | 
 | 3096 |   //  (2) true_target != nullptr && false_target == nullptr | 
 | 3097 |   //        - condition true => branch to true_target | 
 | 3098 |   //  (3) true_target != nullptr && false_target != nullptr | 
 | 3099 |   //        - condition true => branch to true_target | 
 | 3100 |   //        - branch to false_target | 
 | 3101 |   if (IsBooleanValueOrMaterializedCondition(cond)) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3102 |     // The condition instruction has been materialized, compare the output to 0. | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3103 |     Location cond_val = instruction->GetLocations()->InAt(condition_input_index); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3104 |     DCHECK(cond_val.IsRegister()); | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3105 |     if (true_target == nullptr) { | 
 | 3106 |       __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target); | 
 | 3107 |     } else { | 
 | 3108 |       __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target); | 
 | 3109 |     } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3110 |   } else { | 
 | 3111 |     // The condition instruction has not been materialized, use its inputs as | 
 | 3112 |     // the comparison and its condition as the branch condition. | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3113 |     HCondition* condition = cond->AsCondition(); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 3114 |  | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3115 |     Primitive::Type type = condition->InputAt(0)->GetType(); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 3116 |     if (Primitive::IsFloatingPointType(type)) { | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 3117 |       GenerateFcmp(condition); | 
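 |  |       // ARM64FPCondition maps the HIR condition to an ARM condition code, taking | 
 |  |       // the gt/lt bias used for NaN comparisons into account. | 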
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3118 |       if (true_target == nullptr) { | 
| Vladimir Marko | d6e069b | 2016-01-18 11:11:01 +0000 | [diff] [blame] | 3119 |         IfCondition opposite_condition = condition->GetOppositeCondition(); | 
 | 3120 |         __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target); | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3121 |       } else { | 
| Vladimir Marko | d6e069b | 2016-01-18 11:11:01 +0000 | [diff] [blame] | 3122 |         __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target); | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3123 |       } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3124 |     } else { | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 3125 |       // Integer cases. | 
 | 3126 |       Register lhs = InputRegisterAt(condition, 0); | 
 | 3127 |       Operand rhs = InputOperandAt(condition, 1); | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3128 |  | 
 | 3129 |       Condition arm64_cond; | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3130 |       vixl::aarch64::Label* non_fallthrough_target; | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3131 |       if (true_target == nullptr) { | 
 | 3132 |         arm64_cond = ARM64Condition(condition->GetOppositeCondition()); | 
 | 3133 |         non_fallthrough_target = false_target; | 
 | 3134 |       } else { | 
 | 3135 |         arm64_cond = ARM64Condition(condition->GetCondition()); | 
 | 3136 |         non_fallthrough_target = true_target; | 
 | 3137 |       } | 
 | 3138 |  | 
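 |  |       // Comparisons against the immediate 0 can use cbz/cbnz (eq/ne) or test the | 
 |  |       // sign bit with tbz/tbnz (ge/lt), avoiding an explicit Cmp. | 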
| Aart Bik | 086d27e | 2016-01-20 17:02:00 -0800 | [diff] [blame] | 3139 |       if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) && | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3140 |           rhs.IsImmediate() && (rhs.GetImmediate() == 0)) { | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 3141 |         switch (arm64_cond) { | 
 | 3142 |           case eq: | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3143 |             __ Cbz(lhs, non_fallthrough_target); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 3144 |             break; | 
 | 3145 |           case ne: | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3146 |             __ Cbnz(lhs, non_fallthrough_target); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 3147 |             break; | 
 | 3148 |           case lt: | 
 | 3149 |             // Test the sign bit and branch accordingly. | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3150 |             __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 3151 |             break; | 
 | 3152 |           case ge: | 
 | 3153 |             // Test the sign bit and branch accordingly. | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3154 |             __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 3155 |             break; | 
 | 3156 |           default: | 
 | 3157 |             // Without the `static_cast` the compiler throws an error for | 
 | 3158 |             // `-Werror=sign-promo`. | 
 | 3159 |             LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond); | 
 | 3160 |         } | 
 | 3161 |       } else { | 
 | 3162 |         __ Cmp(lhs, rhs); | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3163 |         __ B(arm64_cond, non_fallthrough_target); | 
| Roland Levillain | 7f63c52 | 2015-07-13 15:54:55 +0000 | [diff] [blame] | 3164 |       } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3165 |     } | 
 | 3166 |   } | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3167 |  | 
 | 3168 |   // If neither branch falls through (case 3), the conditional branch to `true_target` | 
 | 3169 |   // was already emitted (case 2) and we need to emit a jump to `false_target`. | 
 | 3170 |   if (true_target != nullptr && false_target != nullptr) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3171 |     __ B(false_target); | 
 | 3172 |   } | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3173 |  | 
 | 3174 |   if (fallthrough_target.IsLinked()) { | 
 | 3175 |     __ Bind(&fallthrough_target); | 
 | 3176 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3177 | } | 
 | 3178 |  | 
| Mingyao Yang | d43b3ac | 2015-04-01 14:03:04 -0700 | [diff] [blame] | 3179 | void LocationsBuilderARM64::VisitIf(HIf* if_instr) { | 
 | 3180 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr); | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3181 |   if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) { | 
| Mingyao Yang | d43b3ac | 2015-04-01 14:03:04 -0700 | [diff] [blame] | 3182 |     locations->SetInAt(0, Location::RequiresRegister()); | 
 | 3183 |   } | 
 | 3184 | } | 
 | 3185 |  | 
 | 3186 | void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) { | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3187 |   HBasicBlock* true_successor = if_instr->IfTrueSuccessor(); | 
 | 3188 |   HBasicBlock* false_successor = if_instr->IfFalseSuccessor(); | 
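 |  |   // A null target below means the successor is the fall-through block, so no | 
 |  |   // branch needs to be emitted for it. | 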
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3189 |   vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor); | 
 | 3190 |   if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) { | 
 | 3191 |     true_target = nullptr; | 
 | 3192 |   } | 
 | 3193 |   vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor); | 
 | 3194 |   if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) { | 
 | 3195 |     false_target = nullptr; | 
 | 3196 |   } | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3197 |   GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target); | 
| Mingyao Yang | d43b3ac | 2015-04-01 14:03:04 -0700 | [diff] [blame] | 3198 | } | 
 | 3199 |  | 
 | 3200 | void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) { | 
 | 3201 |   LocationSummary* locations = new (GetGraph()->GetArena()) | 
 | 3202 |       LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath); | 
| Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 3203 |   locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers. | 
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3204 |   if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) { | 
| Mingyao Yang | d43b3ac | 2015-04-01 14:03:04 -0700 | [diff] [blame] | 3205 |     locations->SetInAt(0, Location::RequiresRegister()); | 
 | 3206 |   } | 
 | 3207 | } | 
 | 3208 |  | 
 | 3209 | void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) { | 
| Aart Bik | 42249c3 | 2016-01-07 15:33:50 -0800 | [diff] [blame] | 3210 |   SlowPathCodeARM64* slow_path = | 
 | 3211 |       deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize); | 
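 |  |   // Branch to the deoptimization slow path when the condition holds; otherwise | 
 |  |   // fall through. | 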
| David Brazdil | 0debae7 | 2015-11-12 18:37:00 +0000 | [diff] [blame] | 3212 |   GenerateTestAndBranch(deoptimize, | 
 | 3213 |                         /* condition_input_index */ 0, | 
 | 3214 |                         slow_path->GetEntryLabel(), | 
 | 3215 |                         /* false_target */ nullptr); | 
| Mingyao Yang | d43b3ac | 2015-04-01 14:03:04 -0700 | [diff] [blame] | 3216 | } | 
 | 3217 |  | 
| David Brazdil | c0b601b | 2016-02-08 14:20:45 +0000 | [diff] [blame] | 3218 | static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) { | 
 | 3219 |   return condition->IsCondition() && | 
 | 3220 |          Primitive::IsFloatingPointType(condition->InputAt(0)->GetType()); | 
 | 3221 | } | 
 | 3222 |  | 
| Alexandre Rames | 880f119 | 2016-06-13 16:04:50 +0100 | [diff] [blame] | 3223 | static inline Condition GetConditionForSelect(HCondition* condition) { | 
 | 3224 |   IfCondition cond = condition->AsCondition()->GetCondition(); | 
| David Brazdil | c0b601b | 2016-02-08 14:20:45 +0000 | [diff] [blame] | 3225 |   return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias()) | 
 | 3226 |                                                      : ARM64Condition(cond); | 
 | 3227 | } | 
 | 3228 |  | 
| David Brazdil | 74eb1b2 | 2015-12-14 11:44:01 +0000 | [diff] [blame] | 3229 | void LocationsBuilderARM64::VisitSelect(HSelect* select) { | 
 | 3230 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select); | 
| Alexandre Rames | 880f119 | 2016-06-13 16:04:50 +0100 | [diff] [blame] | 3231 |   if (Primitive::IsFloatingPointType(select->GetType())) { | 
 | 3232 |     locations->SetInAt(0, Location::RequiresFpuRegister()); | 
 | 3233 |     locations->SetInAt(1, Location::RequiresFpuRegister()); | 
 | 3234 |     locations->SetOut(Location::RequiresFpuRegister()); | 
 | 3235 |   } else { | 
 | 3236 |     HConstant* cst_true_value = select->GetTrueValue()->AsConstant(); | 
 | 3237 |     HConstant* cst_false_value = select->GetFalseValue()->AsConstant(); | 
 | 3238 |     bool is_true_value_constant = cst_true_value != nullptr; | 
 | 3239 |     bool is_false_value_constant = cst_false_value != nullptr; | 
 | 3240 |     // Ask VIXL whether we should synthesize constants in registers. | 
 | 3241 |     // We give an arbitrary register to VIXL when dealing with non-constant inputs. | 
 | 3242 |     Operand true_op = is_true_value_constant ? | 
 | 3243 |         Operand(Int64FromConstant(cst_true_value)) : Operand(x1); | 
 | 3244 |     Operand false_op = is_false_value_constant ? | 
 | 3245 |         Operand(Int64FromConstant(cst_false_value)) : Operand(x2); | 
 | 3246 |     bool true_value_in_register = false; | 
 | 3247 |     bool false_value_in_register = false; | 
 | 3248 |     MacroAssembler::GetCselSynthesisInformation( | 
 | 3249 |         x0, true_op, false_op, &true_value_in_register, &false_value_in_register); | 
 | 3250 |     true_value_in_register |= !is_true_value_constant; | 
 | 3251 |     false_value_in_register |= !is_false_value_constant; | 
 | 3252 |  | 
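 |  |     // Note: for HSelect, input 0 is the false value and input 1 is the true value. | 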
 | 3253 |     locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister() | 
 | 3254 |                                                  : Location::ConstantLocation(cst_true_value)); | 
 | 3255 |     locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister() | 
 | 3256 |                                                   : Location::ConstantLocation(cst_false_value)); | 
 | 3257 |     locations->SetOut(Location::RequiresRegister()); | 
| David Brazdil | 74eb1b2 | 2015-12-14 11:44:01 +0000 | [diff] [blame] | 3258 |   } | 
| Alexandre Rames | 880f119 | 2016-06-13 16:04:50 +0100 | [diff] [blame] | 3259 |  | 
| David Brazdil | 74eb1b2 | 2015-12-14 11:44:01 +0000 | [diff] [blame] | 3260 |   if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) { | 
 | 3261 |     locations->SetInAt(2, Location::RequiresRegister()); | 
 | 3262 |   } | 
| David Brazdil | 74eb1b2 | 2015-12-14 11:44:01 +0000 | [diff] [blame] | 3263 | } | 
 | 3264 |  | 
 | 3265 | void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) { | 
| David Brazdil | c0b601b | 2016-02-08 14:20:45 +0000 | [diff] [blame] | 3266 |   HInstruction* cond = select->GetCondition(); | 
| David Brazdil | c0b601b | 2016-02-08 14:20:45 +0000 | [diff] [blame] | 3267 |   Condition csel_cond; | 
 | 3268 |  | 
 | 3269 |   if (IsBooleanValueOrMaterializedCondition(cond)) { | 
 | 3270 |     if (cond->IsCondition() && cond->GetNext() == select) { | 
| Alexandre Rames | 880f119 | 2016-06-13 16:04:50 +0100 | [diff] [blame] | 3271 |       // Use the condition flags set by the previous instruction. | 
 | 3272 |       csel_cond = GetConditionForSelect(cond->AsCondition()); | 
| David Brazdil | c0b601b | 2016-02-08 14:20:45 +0000 | [diff] [blame] | 3273 |     } else { | 
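 |  |       // The condition value is materialized in a register; compare it against zero. | 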
 | 3274 |       __ Cmp(InputRegisterAt(select, 2), 0); | 
| Alexandre Rames | 880f119 | 2016-06-13 16:04:50 +0100 | [diff] [blame] | 3275 |       csel_cond = ne; | 
| David Brazdil | c0b601b | 2016-02-08 14:20:45 +0000 | [diff] [blame] | 3276 |     } | 
 | 3277 |   } else if (IsConditionOnFloatingPointValues(cond)) { | 
| Roland Levillain | 1a65388 | 2016-03-18 18:05:57 +0000 | [diff] [blame] | 3278 |     GenerateFcmp(cond); | 
| Alexandre Rames | 880f119 | 2016-06-13 16:04:50 +0100 | [diff] [blame] | 3279 |     csel_cond = GetConditionForSelect(cond->AsCondition()); | 
| David Brazdil | c0b601b | 2016-02-08 14:20:45 +0000 | [diff] [blame] | 3280 |   } else { | 
 | 3281 |     __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1)); | 
| Alexandre Rames | 880f119 | 2016-06-13 16:04:50 +0100 | [diff] [blame] | 3282 |     csel_cond = GetConditionForSelect(cond->AsCondition()); | 
| David Brazdil | c0b601b | 2016-02-08 14:20:45 +0000 | [diff] [blame] | 3283 |   } | 
 | 3284 |  | 
| Alexandre Rames | 880f119 | 2016-06-13 16:04:50 +0100 | [diff] [blame] | 3285 |   if (Primitive::IsFloatingPointType(select->GetType())) { | 
 | 3286 |     __ Fcsel(OutputFPRegister(select), | 
 | 3287 |              InputFPRegisterAt(select, 1), | 
 | 3288 |              InputFPRegisterAt(select, 0), | 
 | 3289 |              csel_cond); | 
 | 3290 |   } else { | 
 | 3291 |     __ Csel(OutputRegister(select), | 
 | 3292 |             InputOperandAt(select, 1), | 
 | 3293 |             InputOperandAt(select, 0), | 
 | 3294 |             csel_cond); | 
| David Brazdil | c0b601b | 2016-02-08 14:20:45 +0000 | [diff] [blame] | 3295 |   } | 
| David Brazdil | 74eb1b2 | 2015-12-14 11:44:01 +0000 | [diff] [blame] | 3296 | } | 
 | 3297 |  | 
| David Srbecky | 0cf4493 | 2015-12-09 14:09:59 +0000 | [diff] [blame] | 3298 | void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) { | 
 | 3299 |   new (GetGraph()->GetArena()) LocationSummary(info); | 
 | 3300 | } | 
 | 3301 |  | 
| David Srbecky | d28f4a0 | 2016-03-14 17:14:24 +0000 | [diff] [blame] | 3302 | void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) { | 
 | 3303 |   // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile. | 
| David Srbecky | c7098ff | 2016-02-09 14:30:11 +0000 | [diff] [blame] | 3304 | } | 
 | 3305 |  | 
 | 3306 | void CodeGeneratorARM64::GenerateNop() { | 
 | 3307 |   __ Nop(); | 
| David Srbecky | 0cf4493 | 2015-12-09 14:09:59 +0000 | [diff] [blame] | 3308 | } | 
 | 3309 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3310 | void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 3311 |   HandleFieldGet(instruction); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3312 | } | 
 | 3313 |  | 
 | 3314 | void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 3315 |   HandleFieldGet(instruction, instruction->GetFieldInfo()); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3316 | } | 
 | 3317 |  | 
 | 3318 | void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 3319 |   HandleFieldSet(instruction); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3320 | } | 
 | 3321 |  | 
 | 3322 | void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { | 
| Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 3323 |   HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull()); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3324 | } | 
 | 3325 |  | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3326 | // Temp is used for read barrier. | 
 | 3327 | static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) { | 
 | 3328 |   if (kEmitCompilerReadBarrier && | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3329 |       (kUseBakerReadBarrier || | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3330 |           type_check_kind == TypeCheckKind::kAbstractClassCheck || | 
 | 3331 |           type_check_kind == TypeCheckKind::kClassHierarchyCheck || | 
 | 3332 |           type_check_kind == TypeCheckKind::kArrayObjectCheck)) { | 
 | 3333 |     return 1; | 
 | 3334 |   } | 
 | 3335 |   return 0; | 
 | 3336 | } | 
 | 3337 |  | 
 | 3338 | // InterfaceCheck has 3 temps: one for holding the number of interfaces, one for the current | 
 | 3339 | // interface pointer, and one for loading the current interface. | 
 | 3340 | // The other checks have one temp for loading the object's class. | 
 | 3341 | static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) { | 
 | 3342 |   if (type_check_kind == TypeCheckKind::kInterfaceCheck) { | 
 | 3343 |     return 3; | 
 | 3344 |   } | 
 | 3345 |   return 1 + NumberOfInstanceOfTemps(type_check_kind); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3346 | } | 
 | 3347 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3348 | void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) { | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3349 |   LocationSummary::CallKind call_kind = LocationSummary::kNoCall; | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3350 |   TypeCheckKind type_check_kind = instruction->GetTypeCheckKind(); | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 3351 |   bool baker_read_barrier_slow_path = false; | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3352 |   switch (type_check_kind) { | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3353 |     case TypeCheckKind::kExactCheck: | 
 | 3354 |     case TypeCheckKind::kAbstractClassCheck: | 
 | 3355 |     case TypeCheckKind::kClassHierarchyCheck: | 
 | 3356 |     case TypeCheckKind::kArrayObjectCheck: | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3357 |       call_kind = | 
 | 3358 |           kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall; | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 3359 |       baker_read_barrier_slow_path = kUseBakerReadBarrier; | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3360 |       break; | 
 | 3361 |     case TypeCheckKind::kArrayCheck: | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3362 |     case TypeCheckKind::kUnresolvedCheck: | 
 | 3363 |     case TypeCheckKind::kInterfaceCheck: | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3364 |       call_kind = LocationSummary::kCallOnSlowPath; | 
 | 3365 |       break; | 
 | 3366 |   } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3367 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3368 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind); | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 3369 |   if (baker_read_barrier_slow_path) { | 
| Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 3370 |     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers. | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 3371 |   } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3372 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 3373 |   locations->SetInAt(1, Location::RequiresRegister()); | 
 | 3374 |   // The "out" register is used as a temporary, so it overlaps with the inputs. | 
 | 3375 |   // Note that TypeCheckSlowPathARM64 uses this register too. | 
 | 3376 |   locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3377 |   // Add temps if necessary for read barriers. | 
 | 3378 |   locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind)); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3379 | } | 
 | 3380 |  | 
 | 3381 | void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3382 |   TypeCheckKind type_check_kind = instruction->GetTypeCheckKind(); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3383 |   LocationSummary* locations = instruction->GetLocations(); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3384 |   Location obj_loc = locations->InAt(0); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3385 |   Register obj = InputRegisterAt(instruction, 0); | 
 | 3386 |   Register cls = InputRegisterAt(instruction, 1); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3387 |   Location out_loc = locations->Out(); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3388 |   Register out = OutputRegister(instruction); | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3389 |   const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind); | 
 | 3390 |   DCHECK_LE(num_temps, 1u); | 
 | 3391 |   Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation(); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3392 |   uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); | 
 | 3393 |   uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value(); | 
 | 3394 |   uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value(); | 
 | 3395 |   uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value(); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3396 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3397 |   vixl::aarch64::Label done, zero; | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3398 |   SlowPathCodeARM64* slow_path = nullptr; | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3399 |  | 
 | 3400 |   // Return 0 if `obj` is null. | 
| Guillaume "Vermeille" Sanchez | af88835 | 2015-04-20 14:41:30 +0100 | [diff] [blame] | 3401 |   // Avoid null check if we know `obj` is not null. | 
 | 3402 |   if (instruction->MustDoNullCheck()) { | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3403 |     __ Cbz(obj, &zero); | 
 | 3404 |   } | 
 | 3405 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3406 |   // /* HeapReference<Class> */ out = obj->klass_ | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3407 |   GenerateReferenceLoadTwoRegisters(instruction, | 
 | 3408 |                                     out_loc, | 
 | 3409 |                                     obj_loc, | 
 | 3410 |                                     class_offset, | 
 | 3411 |                                     maybe_temp_loc, | 
 | 3412 |                                     kEmitCompilerReadBarrier); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3413 |  | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3414 |   switch (type_check_kind) { | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3415 |     case TypeCheckKind::kExactCheck: { | 
 | 3416 |       __ Cmp(out, cls); | 
 | 3417 |       __ Cset(out, eq); | 
 | 3418 |       if (zero.IsLinked()) { | 
 | 3419 |         __ B(&done); | 
 | 3420 |       } | 
 | 3421 |       break; | 
 | 3422 |     } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3423 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3424 |     case TypeCheckKind::kAbstractClassCheck: { | 
 | 3425 |       // If the class is abstract, we eagerly fetch the super class of the | 
 | 3426 |       // object to avoid doing a comparison we know will fail. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3427 |       vixl::aarch64::Label loop, success; | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3428 |       __ Bind(&loop); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3429 |       // /* HeapReference<Class> */ out = out->super_class_ | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3430 |       GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3431 |       // If `out` is null, we use it for the result, and jump to `done`. | 
 | 3432 |       __ Cbz(out, &done); | 
 | 3433 |       __ Cmp(out, cls); | 
 | 3434 |       __ B(ne, &loop); | 
 | 3435 |       __ Mov(out, 1); | 
 | 3436 |       if (zero.IsLinked()) { | 
 | 3437 |         __ B(&done); | 
 | 3438 |       } | 
 | 3439 |       break; | 
 | 3440 |     } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3441 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3442 |     case TypeCheckKind::kClassHierarchyCheck: { | 
 | 3443 |       // Walk over the class hierarchy to find a match. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3444 |       vixl::aarch64::Label loop, success; | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3445 |       __ Bind(&loop); | 
 | 3446 |       __ Cmp(out, cls); | 
 | 3447 |       __ B(eq, &success); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3448 |       // /* HeapReference<Class> */ out = out->super_class_ | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3449 |       GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3450 |       __ Cbnz(out, &loop); | 
 | 3451 |       // If `out` is null, we use it for the result, and jump to `done`. | 
 | 3452 |       __ B(&done); | 
 | 3453 |       __ Bind(&success); | 
 | 3454 |       __ Mov(out, 1); | 
 | 3455 |       if (zero.IsLinked()) { | 
 | 3456 |         __ B(&done); | 
 | 3457 |       } | 
 | 3458 |       break; | 
 | 3459 |     } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3460 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3461 |     case TypeCheckKind::kArrayObjectCheck: { | 
| Nicolas Geoffray | abfcf18 | 2015-09-21 18:41:21 +0100 | [diff] [blame] | 3462 |       // Do an exact check. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3463 |       vixl::aarch64::Label exact_check; | 
| Nicolas Geoffray | abfcf18 | 2015-09-21 18:41:21 +0100 | [diff] [blame] | 3464 |       __ Cmp(out, cls); | 
 | 3465 |       __ B(eq, &exact_check); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3466 |       // Otherwise, we need to check that the object's class is a non-primitive array. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3467 |       // /* HeapReference<Class> */ out = out->component_type_ | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3468 |       GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3469 |       // If `out` is null, we use it for the result, and jump to `done`. | 
 | 3470 |       __ Cbz(out, &done); | 
 | 3471 |       __ Ldrh(out, HeapOperand(out, primitive_offset)); | 
 | 3472 |       static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot"); | 
 | 3473 |       __ Cbnz(out, &zero); | 
| Nicolas Geoffray | abfcf18 | 2015-09-21 18:41:21 +0100 | [diff] [blame] | 3474 |       __ Bind(&exact_check); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3475 |       __ Mov(out, 1); | 
 | 3476 |       __ B(&done); | 
 | 3477 |       break; | 
 | 3478 |     } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3479 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3480 |     case TypeCheckKind::kArrayCheck: { | 
 | 3481 |       __ Cmp(out, cls); | 
 | 3482 |       DCHECK(locations->OnlyCallsOnSlowPath()); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3483 |       slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction, | 
 | 3484 |                                                                       /* is_fatal */ false); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3485 |       codegen_->AddSlowPath(slow_path); | 
 | 3486 |       __ B(ne, slow_path->GetEntryLabel()); | 
 | 3487 |       __ Mov(out, 1); | 
 | 3488 |       if (zero.IsLinked()) { | 
 | 3489 |         __ B(&done); | 
 | 3490 |       } | 
 | 3491 |       break; | 
 | 3492 |     } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3493 |  | 
| Calin Juravle | 98893e1 | 2015-10-02 21:05:03 +0100 | [diff] [blame] | 3494 |     case TypeCheckKind::kUnresolvedCheck: | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3495 |     case TypeCheckKind::kInterfaceCheck: { | 
 | 3496 |       // Note that we indeed only call on the slow path, but we always go | 
 | 3497 |       // into the slow path for the unresolved and interface check | 
 | 3498 |       // cases. | 
 | 3499 |       // | 
 | 3500 |       // We cannot directly call the InstanceofNonTrivial runtime | 
 | 3501 |       // entry point without resorting to a type checking slow path | 
 | 3502 |       // here (i.e. by calling InvokeRuntime directly), as it would | 
 | 3503 |       // require us to assign fixed registers for the inputs of this | 
 | 3504 |       // HInstanceOf instruction (following the runtime calling | 
 | 3505 |       // convention), which might be cluttered by the potential first | 
 | 3506 |       // read barrier emission at the beginning of this method. | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3507 |       // | 
 | 3508 |       // TODO: Introduce a new runtime entry point taking the object | 
 | 3509 |       // to test (instead of its class) as argument, and let it deal | 
 | 3510 |       // with the read barrier issues. This will let us refactor this | 
 | 3511 |       // case of the `switch` code as it was previously (with a direct | 
 | 3512 |       // call to the runtime not using a type checking slow path). | 
 | 3513 |       // This should also be beneficial for the other cases above. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3514 |       DCHECK(locations->OnlyCallsOnSlowPath()); | 
 | 3515 |       slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction, | 
 | 3516 |                                                                       /* is_fatal */ false); | 
 | 3517 |       codegen_->AddSlowPath(slow_path); | 
 | 3518 |       __ B(slow_path->GetEntryLabel()); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3519 |       if (zero.IsLinked()) { | 
 | 3520 |         __ B(&done); | 
 | 3521 |       } | 
 | 3522 |       break; | 
 | 3523 |     } | 
 | 3524 |   } | 
 | 3525 |  | 
 | 3526 |   if (zero.IsLinked()) { | 
 | 3527 |     __ Bind(&zero); | 
| Guillaume "Vermeille" Sanchez | af88835 | 2015-04-20 14:41:30 +0100 | [diff] [blame] | 3528 |     __ Mov(out, 0); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3529 |   } | 
 | 3530 |  | 
 | 3531 |   if (done.IsLinked()) { | 
 | 3532 |     __ Bind(&done); | 
 | 3533 |   } | 
 | 3534 |  | 
 | 3535 |   if (slow_path != nullptr) { | 
 | 3536 |     __ Bind(slow_path->GetExitLabel()); | 
 | 3537 |   } | 
 | 3538 | } | 
 | 3539 |  | 
 | 3540 | void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) { | 
 | 3541 |   LocationSummary::CallKind call_kind = LocationSummary::kNoCall; | 
 | 3542 |   bool throws_into_catch = instruction->CanThrowIntoCatchBlock(); | 
 | 3543 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3544 |   TypeCheckKind type_check_kind = instruction->GetTypeCheckKind(); | 
 | 3545 |   switch (type_check_kind) { | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3546 |     case TypeCheckKind::kExactCheck: | 
 | 3547 |     case TypeCheckKind::kAbstractClassCheck: | 
 | 3548 |     case TypeCheckKind::kClassHierarchyCheck: | 
 | 3549 |     case TypeCheckKind::kArrayObjectCheck: | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3550 |       call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ? | 
 | 3551 |           LocationSummary::kCallOnSlowPath : | 
 | 3552 |           LocationSummary::kNoCall;  // In fact, call on a fatal (non-returning) slow path. | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3553 |       break; | 
 | 3554 |     case TypeCheckKind::kArrayCheck: | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3555 |     case TypeCheckKind::kUnresolvedCheck: | 
 | 3556 |     case TypeCheckKind::kInterfaceCheck: | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3557 |       call_kind = LocationSummary::kCallOnSlowPath; | 
 | 3558 |       break; | 
 | 3559 |   } | 
 | 3560 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3561 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind); | 
 | 3562 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 3563 |   locations->SetInAt(1, Location::RequiresRegister()); | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3564 |   // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64. | 
 | 3565 |   locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind)); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3566 | } | 
 | 3567 |  | 
 | 3568 | void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3569 |   TypeCheckKind type_check_kind = instruction->GetTypeCheckKind(); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3570 |   LocationSummary* locations = instruction->GetLocations(); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3571 |   Location obj_loc = locations->InAt(0); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3572 |   Register obj = InputRegisterAt(instruction, 0); | 
 | 3573 |   Register cls = InputRegisterAt(instruction, 1); | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3574 |   const size_t num_temps = NumberOfCheckCastTemps(type_check_kind); | 
 | 3575 |   DCHECK_GE(num_temps, 1u); | 
 | 3576 |   DCHECK_LE(num_temps, 3u); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3577 |   Location temp_loc = locations->GetTemp(0); | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3578 |   Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation(); | 
 | 3579 |   Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation(); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3580 |   Register temp = WRegisterFrom(temp_loc); | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3581 |   const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); | 
 | 3582 |   const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value(); | 
 | 3583 |   const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value(); | 
 | 3584 |   const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value(); | 
 | 3585 |   const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value(); | 
 | 3586 |   const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value(); | 
 | 3587 |   const uint32_t object_array_data_offset = | 
 | 3588 |       mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value(); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3589 |  | 
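 |  |   // The type check slow path is fatal (it never returns) only for the simple | 
 |  |   // check kinds, and only when the instruction cannot throw into a catch block. | 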
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3590 |   bool is_type_check_slow_path_fatal = | 
 | 3591 |       (type_check_kind == TypeCheckKind::kExactCheck || | 
 | 3592 |        type_check_kind == TypeCheckKind::kAbstractClassCheck || | 
 | 3593 |        type_check_kind == TypeCheckKind::kClassHierarchyCheck || | 
 | 3594 |        type_check_kind == TypeCheckKind::kArrayObjectCheck) && | 
 | 3595 |       !instruction->CanThrowIntoCatchBlock(); | 
 | 3596 |   SlowPathCodeARM64* type_check_slow_path = | 
 | 3597 |       new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction, | 
 | 3598 |                                                           is_type_check_slow_path_fatal); | 
 | 3599 |   codegen_->AddSlowPath(type_check_slow_path); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3600 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3601 |   vixl::aarch64::Label done; | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3602 |   // Avoid null check if we know obj is not null. | 
 | 3603 |   if (instruction->MustDoNullCheck()) { | 
| Guillaume "Vermeille" Sanchez | af88835 | 2015-04-20 14:41:30 +0100 | [diff] [blame] | 3604 |     __ Cbz(obj, &done); | 
 | 3605 |   } | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3606 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3607 |   switch (type_check_kind) { | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3608 |     case TypeCheckKind::kExactCheck: | 
 | 3609 |     case TypeCheckKind::kArrayCheck: { | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3610 |       // /* HeapReference<Class> */ temp = obj->klass_ | 
 | 3611 |       GenerateReferenceLoadTwoRegisters(instruction, | 
 | 3612 |                                         temp_loc, | 
 | 3613 |                                         obj_loc, | 
 | 3614 |                                         class_offset, | 
 | 3615 |                                         maybe_temp2_loc, | 
 | 3616 |                                         kEmitCompilerReadBarrier); | 
 | 3617 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3618 |       __ Cmp(temp, cls); | 
 | 3619 |       // Jump to slow path for throwing the exception or doing a | 
 | 3620 |       // more involved array check. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3621 |       __ B(ne, type_check_slow_path->GetEntryLabel()); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3622 |       break; | 
 | 3623 |     } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3624 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3625 |     case TypeCheckKind::kAbstractClassCheck: { | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3626 |       // /* HeapReference<Class> */ temp = obj->klass_ | 
 | 3627 |       GenerateReferenceLoadTwoRegisters(instruction, | 
 | 3628 |                                         temp_loc, | 
 | 3629 |                                         obj_loc, | 
 | 3630 |                                         class_offset, | 
 | 3631 |                                         maybe_temp2_loc, | 
 | 3632 |                                         kEmitCompilerReadBarrier); | 
 | 3633 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3634 |       // If the class is abstract, we eagerly fetch the super class of the | 
 | 3635 |       // object to avoid doing a comparison we know will fail. | 
| Mathieu Chartier | b99f4d6 | 2016-11-07 16:17:26 -0800 | [diff] [blame] | 3636 |       vixl::aarch64::Label loop; | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3637 |       __ Bind(&loop); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3638 |       // /* HeapReference<Class> */ temp = temp->super_class_ | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3639 |       GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3640 |  | 
| Mathieu Chartier | b99f4d6 | 2016-11-07 16:17:26 -0800 | [diff] [blame] | 3641 |       // If the class reference currently in `temp` is null, jump to the slow path to throw the | 
 | 3642 |       // exception. | 
 | 3643 |       __ Cbz(temp, type_check_slow_path->GetEntryLabel()); | 
 | 3644 |       // Otherwise, compare classes. | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3645 |       __ Cmp(temp, cls); | 
 | 3646 |       __ B(ne, &loop); | 
 | 3647 |       break; | 
 | 3648 |     } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3649 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3650 |     case TypeCheckKind::kClassHierarchyCheck: { | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3651 |       // /* HeapReference<Class> */ temp = obj->klass_ | 
 | 3652 |       GenerateReferenceLoadTwoRegisters(instruction, | 
 | 3653 |                                         temp_loc, | 
 | 3654 |                                         obj_loc, | 
 | 3655 |                                         class_offset, | 
 | 3656 |                                         maybe_temp2_loc, | 
 | 3657 |                                         kEmitCompilerReadBarrier); | 
 | 3658 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3659 |       // Walk over the class hierarchy to find a match. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3660 |       vixl::aarch64::Label loop; | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3661 |       __ Bind(&loop); | 
 | 3662 |       __ Cmp(temp, cls); | 
| Nicolas Geoffray | abfcf18 | 2015-09-21 18:41:21 +0100 | [diff] [blame] | 3663 |       __ B(eq, &done); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3664 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3665 |       // /* HeapReference<Class> */ temp = temp->super_class_ | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3666 |       GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3667 |  | 
 | 3668 |       // If the class reference currently in `temp` is not null, jump | 
 | 3669 |       // back at the beginning of the loop. | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3670 |       __ Cbnz(temp, &loop); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3671 |       // Otherwise, jump to the slow path to throw the exception. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3672 |       __ B(type_check_slow_path->GetEntryLabel()); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3673 |       break; | 
 | 3674 |     } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3675 |  | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3676 |     case TypeCheckKind::kArrayObjectCheck: { | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3677 |       // /* HeapReference<Class> */ temp = obj->klass_ | 
 | 3678 |       GenerateReferenceLoadTwoRegisters(instruction, | 
 | 3679 |                                         temp_loc, | 
 | 3680 |                                         obj_loc, | 
 | 3681 |                                         class_offset, | 
 | 3682 |                                         maybe_temp2_loc, | 
 | 3683 |                                         kEmitCompilerReadBarrier); | 
 | 3684 |  | 
| Nicolas Geoffray | abfcf18 | 2015-09-21 18:41:21 +0100 | [diff] [blame] | 3685 |       // Do an exact check. | 
 | 3686 |       __ Cmp(temp, cls); | 
 | 3687 |       __ B(eq, &done); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3688 |  | 
 | 3689 |       // Otherwise, we need to check that the object's class is a non-primitive array. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3690 |       // /* HeapReference<Class> */ temp = temp->component_type_ | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3691 |       GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3692 |  | 
| Mathieu Chartier | b99f4d6 | 2016-11-07 16:17:26 -0800 | [diff] [blame] | 3693 |       // If the component type is null, jump to the slow path to throw the exception. | 
 | 3694 |       __ Cbz(temp, type_check_slow_path->GetEntryLabel()); | 
 | 3695 |       // Otherwise, the object is indeed an array. Further check that this component type is not a | 
 | 3696 |       // primitive type. | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3697 |       __ Ldrh(temp, HeapOperand(temp, primitive_offset)); | 
 | 3698 |       static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot"); | 
| Mathieu Chartier | b99f4d6 | 2016-11-07 16:17:26 -0800 | [diff] [blame] | 3699 |       __ Cbnz(temp, type_check_slow_path->GetEntryLabel()); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3700 |       break; | 
 | 3701 |     } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3702 |  | 
| Calin Juravle | 98893e1 | 2015-10-02 21:05:03 +0100 | [diff] [blame] | 3703 |     case TypeCheckKind::kUnresolvedCheck: | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3704 |       // We always go into the type check slow path for the unresolved check cases. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3705 |       // | 
 | 3706 |       // We cannot directly call the CheckCast runtime entry point | 
 | 3707 |       // without resorting to a type checking slow path here (i.e. by | 
 | 3708 |       // calling InvokeRuntime directly), as it would require us to | 
 | 3709 |       // assign fixed registers for the inputs of this HCheckCast | 
 | 3710 |       // instruction (following the runtime calling convention), which | 
 | 3711 |       // might be cluttered by the potential first read barrier | 
 | 3712 |       // emission at the beginning of this method. | 
 | 3713 |       __ B(type_check_slow_path->GetEntryLabel()); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3714 |       break; | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 3715 |     case TypeCheckKind::kInterfaceCheck: { | 
 | 3716 |       // /* HeapReference<Class> */ temp = obj->klass_ | 
 | 3717 |       GenerateReferenceLoadTwoRegisters(instruction, | 
 | 3718 |                                         temp_loc, | 
 | 3719 |                                         obj_loc, | 
 | 3720 |                                         class_offset, | 
 | 3721 |                                         maybe_temp2_loc, | 
 | 3722 |                                         /*emit_read_barrier*/ false); | 
 | 3723 |  | 
 | 3724 |       // /* HeapReference<Class> */ temp = temp->iftable_ | 
 | 3725 |       GenerateReferenceLoadTwoRegisters(instruction, | 
 | 3726 |                                         temp_loc, | 
 | 3727 |                                         temp_loc, | 
 | 3728 |                                         iftable_offset, | 
 | 3729 |                                         maybe_temp2_loc, | 
 | 3730 |                                         /*emit_read_barrier*/ false); | 
 | 3731 |       vixl::aarch64::Label is_null; | 
 | 3732 |       // Null iftable means it is empty and will always fail the check. | 
 | 3733 |       __ Cbz(temp, &is_null); | 
 | 3734 |  | 
 | 3735 |       // Loop through the iftable and check if any class matches. | 
 | 3736 |       __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset)); | 
 | 3737 |  | 
 | 3738 |       vixl::aarch64::Label start_loop; | 
 | 3739 |       __ Bind(&start_loop); | 
 | 3740 |       __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset)); | 
 | 3741 |       GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc)); | 
 | 3742 |       __ Cmp(cls, WRegisterFrom(maybe_temp3_loc)); | 
 | 3743 |       __ B(eq, &done);  // Return if same class. | 
 | 3744 |       // Go to next interface. | 
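 |  |       // Each iftable entry is a pair (interface class, method array), so advance | 
 |  |       // by two heap references and decrement the remaining length by 2. | 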
 | 3745 |       __ Add(temp, temp, 2 * kHeapReferenceSize); | 
 | 3746 |       __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2); | 
 | 3747 |       __ Cbnz(WRegisterFrom(maybe_temp2_loc), &start_loop); | 
 | 3748 |       __ Bind(&is_null); | 
 | 3749 |  | 
 | 3750 |       __ B(type_check_slow_path->GetEntryLabel()); | 
 | 3751 |       break; | 
 | 3752 |     } | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3753 |   } | 
| Nicolas Geoffray | 7537437 | 2015-09-17 17:12:19 +0000 | [diff] [blame] | 3754 |   __ Bind(&done); | 
| Nicolas Geoffray | 85c7bab | 2015-09-18 13:40:46 +0000 | [diff] [blame] | 3755 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3756 |   __ Bind(type_check_slow_path->GetExitLabel()); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3757 | } | 
 | 3758 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3759 | void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) { | 
 | 3760 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant); | 
 | 3761 |   locations->SetOut(Location::ConstantLocation(constant)); | 
 | 3762 | } | 
 | 3763 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 3764 | void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3765 |   // Will be generated at use site. | 
 | 3766 | } | 
 | 3767 |  | 
| Nicolas Geoffray | d6138ef | 2015-02-18 14:48:53 +0000 | [diff] [blame] | 3768 | void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) { | 
 | 3769 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant); | 
 | 3770 |   locations->SetOut(Location::ConstantLocation(constant)); | 
 | 3771 | } | 
 | 3772 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 3773 | void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) { | 
| Nicolas Geoffray | d6138ef | 2015-02-18 14:48:53 +0000 | [diff] [blame] | 3774 |   // Will be generated at use site. | 
| Nicolas Geoffray | d6138ef | 2015-02-18 14:48:53 +0000 | [diff] [blame] | 3775 | } | 
 | 3776 |  | 
| Calin Juravle | 175dc73 | 2015-08-25 15:42:32 +0100 | [diff] [blame] | 3777 | void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) { | 
 | 3778 |   // The trampoline uses the same calling convention as the dex calling convention, | 
 | 3779 |   // except that instead of loading arg0/r0 with the target Method*, arg0/r0 will contain | 
 | 3780 |   // the method_idx. | 
 | 3781 |   HandleInvoke(invoke); | 
 | 3782 | } | 
 | 3783 |  | 
 | 3784 | void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) { | 
 | 3785 |   codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke); | 
 | 3786 | } | 
 | 3787 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3788 | void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) { | 
| Roland Levillain | 2d27c8e | 2015-04-28 15:48:45 +0100 | [diff] [blame] | 3789 |   InvokeDexCallingConventionVisitorARM64 calling_convention_visitor; | 
| Nicolas Geoffray | fd88f16 | 2015-06-03 11:23:52 +0100 | [diff] [blame] | 3790 |   CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3791 | } | 
 | 3792 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3793 | void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) { | 
 | 3794 |   HandleInvoke(invoke); | 
 | 3795 | } | 
 | 3796 |  | 
 | 3797 | void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) { | 
 | 3798 |   // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3799 |   LocationSummary* locations = invoke->GetLocations(); | 
 | 3800 |   Register temp = XRegisterFrom(locations->GetTemp(0)); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3801 |   Location receiver = locations->InAt(0); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3802 |   Offset class_offset = mirror::Object::ClassOffset(); | 
| Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 3803 |   Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3804 |  | 
 | 3805 |   // The register ip1 is required to be used for the hidden argument in | 
 | 3806 |   // art_quick_imt_conflict_trampoline, so prevent VIXL from using it. | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 3807 |   MacroAssembler* masm = GetVIXLAssembler(); | 
 | 3808 |   UseScratchRegisterScope scratch_scope(masm); | 
 | 3809 |   BlockPoolsScope block_pools(masm); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3810 |   scratch_scope.Exclude(ip1); | 
 | 3811 |   __ Mov(ip1, invoke->GetDexMethodIndex()); | 
 | 3812 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3813 |   if (receiver.IsStackSlot()) { | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 3814 |     __ Ldr(temp.W(), StackOperandFrom(receiver)); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3815 |     // /* HeapReference<Class> */ temp = temp->klass_ | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 3816 |     __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset)); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3817 |   } else { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3818 |     // /* HeapReference<Class> */ temp = receiver->klass_ | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 3819 |     __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset)); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3820 |   } | 
| Calin Juravle | 77520bc | 2015-01-12 18:45:46 +0000 | [diff] [blame] | 3821 |   codegen_->MaybeRecordImplicitNullCheck(invoke); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3822 |   // Instead of simply (possibly) unpoisoning `temp` here, we should | 
 | 3823 |   // emit a read barrier for the previous class reference load. | 
 | 3824 |   // However this is not required in practice, as this is an | 
 | 3825 |   // intermediate/temporary reference and because the current | 
 | 3826 |   // concurrent copying collector keeps the from-space memory | 
 | 3827 |   // intact/accessible until the end of the marking phase (the | 
 | 3828 |   // concurrent copying collector may not in the future). | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 3829 |   GetAssembler()->MaybeUnpoisonHeapReference(temp.W()); | 
| Artem Udovichenko | a62cb9b | 2016-06-30 09:18:25 +0000 | [diff] [blame] | 3830 |   __ Ldr(temp, | 
 | 3831 |       MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value())); | 
 | 3832 |   uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement( | 
| Matthew Gharrity | 465ecc8 | 2016-07-19 21:32:52 +0000 | [diff] [blame] | 3833 |       invoke->GetImtIndex(), kArm64PointerSize)); | 
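 |  |   // Note: ImTable::OffsetOfElement amounts to imt_index * pointer size, since the | 
 |  |   // IMT is a flat, fixed-size table of ArtMethod pointers. | 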
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3834 |   // temp = temp->GetImtEntryAt(method_offset); | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 3835 |   __ Ldr(temp, MemOperand(temp, method_offset)); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3836 |   // lr = temp->GetEntryPoint(); | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 3837 |   __ Ldr(lr, MemOperand(temp, entry_point.Int32Value())); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3838 |   // lr(); | 
 | 3839 |   __ Blr(lr); | 
 | 3840 |   DCHECK(!codegen_->IsLeafMethod()); | 
 | 3841 |   codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); | 
 | 3842 | } | 
 | 3843 |  | 
 | 3844 | void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) { | 
| Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 3845 |   IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena()); | 
 | 3846 |   if (intrinsic.TryDispatch(invoke)) { | 
 | 3847 |     return; | 
 | 3848 |   } | 
 | 3849 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3850 |   HandleInvoke(invoke); | 
 | 3851 | } | 
 | 3852 |  | 
| Nicolas Geoffray | e53798a | 2014-12-01 10:31:54 +0000 | [diff] [blame] | 3853 | void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) { | 
| David Brazdil | 58282f4 | 2016-01-14 12:45:10 +0000 | [diff] [blame] | 3854 |   // Explicit clinit checks triggered by static invokes must have been pruned by | 
 | 3855 |   // art::PrepareForRegisterAllocation. | 
 | 3856 |   DCHECK(!invoke->IsStaticWithExplicitClinitCheck()); | 
| Roland Levillain | 4c0eb42 | 2015-04-24 16:43:49 +0100 | [diff] [blame] | 3857 |  | 
| Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 3858 |   IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena()); | 
 | 3859 |   if (intrinsic.TryDispatch(invoke)) { | 
 | 3860 |     return; | 
 | 3861 |   } | 
 | 3862 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 3863 |   HandleInvoke(invoke); | 
 | 3864 | } | 
 | 3865 |  | 
| Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 3866 | static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) { | 
 | 3867 |   if (invoke->GetLocations()->Intrinsified()) { | 
 | 3868 |     IntrinsicCodeGeneratorARM64 intrinsic(codegen); | 
 | 3869 |     intrinsic.Dispatch(invoke); | 
 | 3870 |     return true; | 
 | 3871 |   } | 
 | 3872 |   return false; | 
 | 3873 | } | 
 | 3874 |  | 
| Vladimir Marko | dc151b2 | 2015-10-15 18:02:30 +0100 | [diff] [blame] | 3875 | HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch( | 
 | 3876 |       const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info, | 
| Nicolas Geoffray | 5e4e11e | 2016-09-22 13:17:41 +0100 | [diff] [blame] | 3877 |       HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 3878 |   // On ARM64 we support all dispatch types. | 
| Vladimir Marko | dc151b2 | 2015-10-15 18:02:30 +0100 | [diff] [blame] | 3879 |   return desired_dispatch_info; | 
 | 3880 | } | 
 | 3881 |  | 
| Nicolas Geoffray | 38207af | 2015-06-01 15:46:22 +0100 | [diff] [blame] | 3882 | void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) { | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3883 |   // For better instruction scheduling we load the direct code pointer before the method pointer. | 
 | 3884 |   bool direct_code_loaded = false; | 
 | 3885 |   switch (invoke->GetCodePtrLocation()) { | 
 | 3886 |     case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup: | 
 | 3887 |       // LR = code address from literal pool with link-time patch. | 
 | 3888 |       __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod())); | 
 | 3889 |       direct_code_loaded = true; | 
 | 3890 |       break; | 
 | 3891 |     case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect: | 
 | 3892 |       // LR = invoke->GetDirectCodePtr(); | 
 | 3893 |       __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr())); | 
 | 3894 |       direct_code_loaded = true; | 
 | 3895 |       break; | 
 | 3896 |     default: | 
 | 3897 |       break; | 
 | 3898 |   } | 
 | 3899 |  | 
| Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 3900 |   // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention. | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3901 |   Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp. | 
 | 3902 |   switch (invoke->GetMethodLoadKind()) { | 
| Nicolas Geoffray | da079bb | 2016-09-26 17:56:07 +0100 | [diff] [blame] | 3903 |     case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: { | 
 | 3904 |       uint32_t offset = | 
 | 3905 |           GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value(); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3906 |       // temp = thread->string_init_entrypoint | 
| Nicolas Geoffray | da079bb | 2016-09-26 17:56:07 +0100 | [diff] [blame] | 3907 |       __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset)); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3908 |       break; | 
| Nicolas Geoffray | da079bb | 2016-09-26 17:56:07 +0100 | [diff] [blame] | 3909 |     } | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3910 |     case HInvokeStaticOrDirect::MethodLoadKind::kRecursive: | 
| Vladimir Marko | c53c079 | 2015-11-19 15:48:33 +0000 | [diff] [blame] | 3911 |       callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3912 |       break; | 
 | 3913 |     case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress: | 
 | 3914 |       // Load method address from literal pool. | 
| Alexandre Rames | 6dc0174 | 2015-11-12 14:44:19 +0000 | [diff] [blame] | 3915 |       __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress())); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3916 |       break; | 
 | 3917 |     case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup: | 
 | 3918 |       // Load method address from literal pool with a link-time patch. | 
| Alexandre Rames | 6dc0174 | 2015-11-12 14:44:19 +0000 | [diff] [blame] | 3919 |       __ Ldr(XRegisterFrom(temp), | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3920 |              DeduplicateMethodAddressLiteral(invoke->GetTargetMethod())); | 
 | 3921 |       break; | 
 | 3922 |     case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: { | 
 | 3923 |       // Add ADRP with its PC-relative DexCache access patch. | 
| Nicolas Geoffray | 5e4e11e | 2016-09-22 13:17:41 +0100 | [diff] [blame] | 3924 |       const DexFile& dex_file = invoke->GetDexFile(); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 3925 |       uint32_t element_offset = invoke->GetDexCacheArrayOffset(); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3926 |       vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 3927 |       EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp)); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3928 |       // Add LDR with its PC-relative DexCache access patch. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3929 |       vixl::aarch64::Label* ldr_label = | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 3930 |           NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 3931 |       EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp)); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3932 |       break; | 
| Vladimir Marko | 9b688a0 | 2015-05-06 14:12:42 +0100 | [diff] [blame] | 3933 |     } | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3934 |     case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: { | 
| Vladimir Marko | c53c079 | 2015-11-19 15:48:33 +0000 | [diff] [blame] | 3935 |       Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3936 |       Register reg = XRegisterFrom(temp); | 
 | 3937 |       Register method_reg; | 
 | 3938 |       if (current_method.IsRegister()) { | 
 | 3939 |         method_reg = XRegisterFrom(current_method); | 
 | 3940 |       } else { | 
 | 3941 |         DCHECK(invoke->GetLocations()->Intrinsified()); | 
 | 3942 |         DCHECK(!current_method.IsValid()); | 
 | 3943 |         method_reg = reg; | 
 | 3944 |         __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset)); | 
 | 3945 |       } | 
| Vladimir Marko | b2c431e | 2015-08-19 12:45:42 +0000 | [diff] [blame] | 3946 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 3947 |       // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_; | 
| Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 3948 |       __ Ldr(reg.X(), | 
 | 3949 |              MemOperand(method_reg.X(), | 
| Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 3950 |                         ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value())); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3951 |       // temp = temp[index_in_cache]; | 
| Vladimir Marko | 40ecb12 | 2016-04-06 17:33:41 +0100 | [diff] [blame] | 3952 |       // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file. | 
 | 3953 |       uint32_t index_in_cache = invoke->GetDexMethodIndex(); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3954 |       __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache))); | 
 | 3955 |       break; | 
 | 3956 |     } | 
 | 3957 |   } | 
 | 3958 |  | 
 | 3959 |   switch (invoke->GetCodePtrLocation()) { | 
 | 3960 |     case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf: | 
 | 3961 |       __ Bl(&frame_entry_label_); | 
 | 3962 |       break; | 
 | 3963 |     case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: { | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 3964 |       relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file, | 
 | 3965 |                                           invoke->GetTargetMethod().dex_method_index); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 3966 |       vixl::aarch64::Label* label = &relative_call_patches_.back().label; | 
 | 3967 |       SingleEmissionCheckScope guard(GetVIXLAssembler()); | 
| Alexandre Rames | 6dc0174 | 2015-11-12 14:44:19 +0000 | [diff] [blame] | 3968 |       __ Bind(label); | 
 | 3969 |       __ bl(0);  // Branch and link to itself. This will be overridden at link time. | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3970 |       break; | 
 | 3971 |     } | 
 | 3972 |     case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup: | 
 | 3973 |     case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect: | 
 | 3974 |       // LR prepared above for better instruction scheduling. | 
 | 3975 |       DCHECK(direct_code_loaded); | 
 | 3976 |       // lr() | 
 | 3977 |       __ Blr(lr); | 
 | 3978 |       break; | 
 | 3979 |     case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod: | 
 | 3980 |       // LR = callee_method->entry_point_from_quick_compiled_code_; | 
 | 3981 |       __ Ldr(lr, MemOperand( | 
| Alexandre Rames | 6dc0174 | 2015-11-12 14:44:19 +0000 | [diff] [blame] | 3982 |           XRegisterFrom(callee_method), | 
| Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 3983 |           ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value())); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 3984 |       // lr() | 
 | 3985 |       __ Blr(lr); | 
 | 3986 |       break; | 
| Nicolas Geoffray | 1cf9528 | 2014-12-12 19:22:03 +0000 | [diff] [blame] | 3987 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 3988 |  | 
| Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 3989 |   DCHECK(!IsLeafMethod()); | 
 | 3990 | } | 
 | 3991 |  | 
| Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 3992 | void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) { | 
| Nicolas Geoffray | e523423 | 2015-12-02 09:06:11 +0000 | [diff] [blame] | 3993 |   // Use the calling convention instead of the location of the receiver, as | 
 | 3994 |   // intrinsics may have put the receiver in a different register. In the intrinsics | 
 | 3995 |   // slow path, the arguments have been moved to the right place, so here we are | 
 | 3996 |   // guaranteed that the receiver is the first register of the calling convention. | 
 | 3997 |   InvokeDexCallingConvention calling_convention; | 
 | 3998 |   Register receiver = calling_convention.GetRegisterAt(0); | 
| Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 3999 |   Register temp = XRegisterFrom(temp_in); | 
 | 4000 |   size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset( | 
 | 4001 |       invoke->GetVTableIndex(), kArm64PointerSize).SizeValue(); | 
 | 4002 |   Offset class_offset = mirror::Object::ClassOffset(); | 
| Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 4003 |   Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize); | 
| Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 4004 |  | 
 | 4005 |   BlockPoolsScope block_pools(GetVIXLAssembler()); | 
 | 4006 |  | 
 | 4007 |   DCHECK(receiver.IsRegister()); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 4008 |   // /* HeapReference<Class> */ temp = receiver->klass_ | 
| Nicolas Geoffray | e523423 | 2015-12-02 09:06:11 +0000 | [diff] [blame] | 4009 |   __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset)); | 
| Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 4010 |   MaybeRecordImplicitNullCheck(invoke); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 4011 |   // Instead of simply (possibly) unpoisoning `temp` here, we should | 
 | 4012 |   // emit a read barrier for the previous class reference load. | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 4013 |   // However this is not required in practice, as this is an | 
 | 4013 |   // intermediate/temporary reference and because the current | 
 | 4014 |   // concurrent copying collector keeps the from-space memory | 
 | 4015 |   // intact/accessible until the end of the marking phase (the | 
 | 4016 |   // concurrent copying collector may not in the future). | 
| Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 4017 |   GetAssembler()->MaybeUnpoisonHeapReference(temp.W()); | 
 | 4018 |   // temp = temp->GetMethodAt(method_offset); | 
 | 4019 |   __ Ldr(temp, MemOperand(temp, method_offset)); | 
 | 4020 |   // lr = temp->GetEntryPoint(); | 
 | 4021 |   __ Ldr(lr, MemOperand(temp, entry_point.SizeValue())); | 
 | 4022 |   // lr(); | 
 | 4023 |   __ Blr(lr); | 
 | 4024 | } | 
 | 4025 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4026 | vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch( | 
 | 4027 |     const DexFile& dex_file, | 
 | 4028 |     uint32_t string_index, | 
 | 4029 |     vixl::aarch64::Label* adrp_label) { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4030 |   return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_); | 
 | 4031 | } | 
 | 4032 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4033 | vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch( | 
 | 4034 |     const DexFile& dex_file, | 
 | 4035 |     uint32_t type_index, | 
 | 4036 |     vixl::aarch64::Label* adrp_label) { | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4037 |   return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_); | 
 | 4038 | } | 
 | 4039 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4040 | vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch( | 
 | 4041 |     const DexFile& dex_file, | 
 | 4042 |     uint32_t element_offset, | 
 | 4043 |     vixl::aarch64::Label* adrp_label) { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4044 |   return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_); | 
 | 4045 | } | 
 | 4046 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4047 | vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch( | 
 | 4048 |     const DexFile& dex_file, | 
 | 4049 |     uint32_t offset_or_index, | 
 | 4050 |     vixl::aarch64::Label* adrp_label, | 
 | 4051 |     ArenaDeque<PcRelativePatchInfo>* patches) { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4052 |   // Add a patch entry and return the label. | 
 | 4053 |   patches->emplace_back(dex_file, offset_or_index); | 
 | 4054 |   PcRelativePatchInfo* info = &patches->back(); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4055 |   vixl::aarch64::Label* label = &info->label; | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4056 |   // If adrp_label is null, this is the ADRP patch and needs to point to its own label. | 
 | 4057 |   info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label; | 
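 |  |   // Otherwise this is the ADD/LDR half of the sequence, and pc_insn_label points | 
 |  |   // back at the ADRP so the linker can compute both halves of the PC-relative | 
 |  |   // offset from the same anchor instruction. | 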
 | 4058 |   return label; | 
 | 4059 | } | 
 | 4060 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4061 | vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral( | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4062 |     const DexFile& dex_file, uint32_t string_index) { | 
 | 4063 |   return boot_image_string_patches_.GetOrCreate( | 
 | 4064 |       StringReference(&dex_file, string_index), | 
 | 4065 |       [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); }); | 
 | 4066 | } | 
 | 4067 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4068 | vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral( | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4069 |     const DexFile& dex_file, uint32_t type_index) { | 
 | 4070 |   return boot_image_type_patches_.GetOrCreate( | 
 | 4071 |       TypeReference(&dex_file, type_index), | 
 | 4072 |       [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); }); | 
 | 4073 | } | 
 | 4074 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4075 | vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral( | 
 | 4076 |     uint64_t address) { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4077 |   bool needs_patch = GetCompilerOptions().GetIncludePatchInformation(); | 
 | 4078 |   Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_; | 
 | 4079 |   return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map); | 
 | 4080 | } | 
 | 4081 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4082 | vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral( | 
 | 4083 |     uint64_t address) { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4084 |   return DeduplicateUint64Literal(address); | 
 | 4085 | } | 
 | 4086 |  | 
| Nicolas Geoffray | 997d121 | 2016-11-09 10:36:29 +0000 | [diff] [blame] | 4087 | vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral( | 
 | 4088 |     const DexFile& dex_file, uint32_t string_index) { | 
 | 4089 |   jit_string_roots_.Overwrite(StringReference(&dex_file, string_index), /* placeholder */ 0u); | 
 | 4090 |   return jit_string_patches_.GetOrCreate( | 
 | 4091 |       StringReference(&dex_file, string_index), | 
 | 4092 |       [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); }); | 
 | 4093 | } | 
 | 4094 |  | 
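 |  | // The three placeholder emitters below implement the standard AArch64 ADRP-based | 
 |  | // PC-relative addressing pattern: ADRP materializes the 4 KiB page of the target | 
 |  | // (its 21-bit page immediate is fixed up at link time via the bound label), and | 
 |  | // the paired ADD or LDR supplies the low 12 bits of the offset within that page. | 
 |  | // The SingleEmissionCheckScope keeps the bound label attached to that single | 
 |  | // instruction (no pools emitted in between), so the linker patch targets it | 
 |  | // precisely. | 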
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4095 | void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, | 
 | 4096 |                                              vixl::aarch64::Register reg) { | 
 | 4097 |   DCHECK(reg.IsX()); | 
 | 4098 |   SingleEmissionCheckScope guard(GetVIXLAssembler()); | 
 | 4099 |   __ Bind(fixup_label); | 
 | 4100 |   __ adrp(reg, /* offset placeholder */ 0); | 
 | 4101 | } | 
 | 4102 |  | 
 | 4103 | void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label, | 
 | 4104 |                                             vixl::aarch64::Register out, | 
 | 4105 |                                             vixl::aarch64::Register base) { | 
 | 4106 |   DCHECK(out.IsX()); | 
 | 4107 |   DCHECK(base.IsX()); | 
 | 4108 |   SingleEmissionCheckScope guard(GetVIXLAssembler()); | 
 | 4109 |   __ Bind(fixup_label); | 
 | 4110 |   __ add(out, base, Operand(/* offset placeholder */ 0)); | 
 | 4111 | } | 
 | 4112 |  | 
 | 4113 | void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label, | 
 | 4114 |                                                   vixl::aarch64::Register out, | 
 | 4115 |                                                   vixl::aarch64::Register base) { | 
 | 4116 |   DCHECK(base.IsX()); | 
 | 4117 |   SingleEmissionCheckScope guard(GetVIXLAssembler()); | 
 | 4118 |   __ Bind(fixup_label); | 
 | 4119 |   __ ldr(out, MemOperand(base, /* offset placeholder */ 0)); | 
 | 4120 | } | 
 | 4121 |  | 
 | 4122 | template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> | 
 | 4123 | inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches( | 
 | 4124 |     const ArenaDeque<PcRelativePatchInfo>& infos, | 
 | 4125 |     ArenaVector<LinkerPatch>* linker_patches) { | 
 | 4126 |   for (const PcRelativePatchInfo& info : infos) { | 
 | 4127 |     linker_patches->push_back(Factory(info.label.GetLocation(), | 
 | 4128 |                                       &info.target_dex_file, | 
 | 4129 |                                       info.pc_insn_label->GetLocation(), | 
 | 4130 |                                       info.offset_or_index)); | 
 | 4131 |   } | 
 | 4132 | } | 
 | 4133 |  | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4134 | void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) { | 
 | 4135 |   DCHECK(linker_patches->empty()); | 
 | 4136 |   size_t size = | 
 | 4137 |       method_patches_.size() + | 
 | 4138 |       call_patches_.size() + | 
 | 4139 |       relative_call_patches_.size() + | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4140 |       pc_relative_dex_cache_patches_.size() + | 
 | 4141 |       boot_image_string_patches_.size() + | 
 | 4142 |       pc_relative_string_patches_.size() + | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4143 |       boot_image_type_patches_.size() + | 
 | 4144 |       pc_relative_type_patches_.size() + | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4145 |       boot_image_address_patches_.size(); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4146 |   linker_patches->reserve(size); | 
 | 4147 |   for (const auto& entry : method_patches_) { | 
 | 4148 |     const MethodReference& target_method = entry.first; | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4149 |     vixl::aarch64::Literal<uint64_t>* literal = entry.second; | 
 | 4150 |     linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(), | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4151 |                                                        target_method.dex_file, | 
 | 4152 |                                                        target_method.dex_method_index)); | 
 | 4153 |   } | 
 | 4154 |   for (const auto& entry : call_patches_) { | 
 | 4155 |     const MethodReference& target_method = entry.first; | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4156 |     vixl::aarch64::Literal<uint64_t>* literal = entry.second; | 
 | 4157 |     linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(), | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4158 |                                                      target_method.dex_file, | 
 | 4159 |                                                      target_method.dex_method_index)); | 
 | 4160 |   } | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4161 |   for (const PatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) { | 
 | 4162 |     linker_patches->push_back( | 
 | 4163 |         LinkerPatch::RelativeCodePatch(info.label.GetLocation(), &info.dex_file, info.index)); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4164 |   } | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4165 |   for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4166 |     linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(), | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4167 |                                                               &info.target_dex_file, | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4168 |                                                               info.pc_insn_label->GetLocation(), | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4169 |                                                               info.offset_or_index)); | 
 | 4170 |   } | 
 | 4171 |   for (const auto& entry : boot_image_string_patches_) { | 
 | 4172 |     const StringReference& target_string = entry.first; | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4173 |     vixl::aarch64::Literal<uint32_t>* literal = entry.second; | 
 | 4174 |     linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(), | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4175 |                                                        target_string.dex_file, | 
 | 4176 |                                                        target_string.string_index)); | 
 | 4177 |   } | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4178 |   if (!GetCompilerOptions().IsBootImage()) { | 
 | 4179 |     EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_, | 
 | 4180 |                                                                   linker_patches); | 
 | 4181 |   } else { | 
 | 4182 |     EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_, | 
 | 4183 |                                                                   linker_patches); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4184 |   } | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4185 |   for (const auto& entry : boot_image_type_patches_) { | 
 | 4186 |     const TypeReference& target_type = entry.first; | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4187 |     vixl::aarch64::Literal<uint32_t>* literal = entry.second; | 
 | 4188 |     linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(), | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4189 |                                                      target_type.dex_file, | 
 | 4190 |                                                      target_type.type_index)); | 
 | 4191 |   } | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4192 |   EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_, | 
 | 4193 |                                                                 linker_patches); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4194 |   for (const auto& entry : boot_image_address_patches_) { | 
 | 4195 |     DCHECK(GetCompilerOptions().GetIncludePatchInformation()); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4196 |     vixl::aarch64::Literal<uint32_t>* literal = entry.second; | 
 | 4197 |     linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset())); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4198 |   } | 
 | 4199 | } | 
 | 4200 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4201 | vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value, | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4202 |                                                                       Uint32ToLiteralMap* map) { | 
 | 4203 |   return map->GetOrCreate( | 
 | 4204 |       value, | 
 | 4205 |       [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); }); | 
 | 4206 | } | 
 | 4207 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4208 | vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4209 |   return uint64_literals_.GetOrCreate( | 
 | 4210 |       value, | 
 | 4211 |       [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); }); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4212 | } | 
 | 4213 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4214 | vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral( | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4215 |     MethodReference target_method, | 
 | 4216 |     MethodToLiteralMap* map) { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4217 |   return map->GetOrCreate( | 
 | 4218 |       target_method, | 
 | 4219 |       [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); }); | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4220 | } | 
 | 4221 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4222 | vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral( | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4223 |     MethodReference target_method) { | 
 | 4224 |   return DeduplicateMethodLiteral(target_method, &method_patches_); | 
 | 4225 | } | 
 | 4226 |  | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4227 | vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral( | 
| Vladimir Marko | 5815501 | 2015-08-19 12:49:41 +0000 | [diff] [blame] | 4228 |     MethodReference target_method) { | 
 | 4229 |   return DeduplicateMethodLiteral(target_method, &call_patches_); | 
 | 4230 | } | 
 | 4231 |  | 
 | 4232 |  | 
| Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 4233 | void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) { | 
| David Brazdil | 58282f4 | 2016-01-14 12:45:10 +0000 | [diff] [blame] | 4234 |   // Explicit clinit checks triggered by static invokes must have been pruned by | 
 | 4235 |   // art::PrepareForRegisterAllocation. | 
 | 4236 |   DCHECK(!invoke->IsStaticWithExplicitClinitCheck()); | 
| Roland Levillain | 4c0eb42 | 2015-04-24 16:43:49 +0100 | [diff] [blame] | 4237 |  | 
| Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 4238 |   if (TryGenerateIntrinsicCode(invoke, codegen_)) { | 
 | 4239 |     return; | 
 | 4240 |   } | 
 | 4241 |  | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 4242 |   BlockPoolsScope block_pools(GetVIXLAssembler()); | 
| Nicolas Geoffray | 38207af | 2015-06-01 15:46:22 +0100 | [diff] [blame] | 4243 |   LocationSummary* locations = invoke->GetLocations(); | 
 | 4244 |   codegen_->GenerateStaticOrDirectCall( | 
 | 4245 |       invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation()); | 
| Nicolas Geoffray | a8ac913 | 2015-03-13 16:36:36 +0000 | [diff] [blame] | 4246 |   codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4247 | } | 
 | 4248 |  | 
 | 4249 | void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) { | 
| Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 4250 |   if (TryGenerateIntrinsicCode(invoke, codegen_)) { | 
 | 4251 |     return; | 
 | 4252 |   } | 
 | 4253 |  | 
| Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 4254 |   codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0)); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4255 |   DCHECK(!codegen_->IsLeafMethod()); | 
 | 4256 |   codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); | 
 | 4257 | } | 
 | 4258 |  | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4259 | HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind( | 
 | 4260 |     HLoadClass::LoadKind desired_class_load_kind) { | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4261 |   switch (desired_class_load_kind) { | 
 | 4262 |     case HLoadClass::LoadKind::kReferrersClass: | 
 | 4263 |       break; | 
 | 4264 |     case HLoadClass::LoadKind::kBootImageLinkTimeAddress: | 
 | 4265 |       DCHECK(!GetCompilerOptions().GetCompilePic()); | 
 | 4266 |       break; | 
 | 4267 |     case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: | 
 | 4268 |       DCHECK(GetCompilerOptions().GetCompilePic()); | 
 | 4269 |       break; | 
 | 4270 |     case HLoadClass::LoadKind::kBootImageAddress: | 
 | 4271 |       break; | 
 | 4272 |     case HLoadClass::LoadKind::kDexCacheAddress: | 
 | 4273 |       DCHECK(Runtime::Current()->UseJitCompilation()); | 
 | 4274 |       break; | 
 | 4275 |     case HLoadClass::LoadKind::kDexCachePcRelative: | 
 | 4276 |       DCHECK(!Runtime::Current()->UseJitCompilation()); | 
 | 4277 |       break; | 
 | 4278 |     case HLoadClass::LoadKind::kDexCacheViaMethod: | 
 | 4279 |       break; | 
 | 4280 |   } | 
 | 4281 |   return desired_class_load_kind; | 
 | 4282 | } | 
 | 4283 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4284 | void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) { | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4285 |   if (cls->NeedsAccessCheck()) { | 
 | 4286 |     InvokeRuntimeCallingConvention calling_convention; | 
 | 4287 |     CodeGenerator::CreateLoadClassLocationSummary( | 
 | 4288 |         cls, | 
 | 4289 |         LocationFrom(calling_convention.GetRegisterAt(0)), | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4290 |         LocationFrom(vixl::aarch64::x0), | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4291 |         /* code_generator_supports_read_barrier */ true); | 
 | 4292 |     return; | 
 | 4293 |   } | 
 | 4294 |  | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4295 |   const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); | 
 | 4296 |   LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier) | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4297 |       ? LocationSummary::kCallOnSlowPath | 
 | 4298 |       : LocationSummary::kNoCall; | 
 | 4299 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind); | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4300 |   if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) { | 
| Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 4301 |     locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers. | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 4302 |   } | 
 | 4303 |  | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4304 |   HLoadClass::LoadKind load_kind = cls->GetLoadKind(); | 
 | 4305 |   if (load_kind == HLoadClass::LoadKind::kReferrersClass || | 
 | 4306 |       load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) { | 
 | 4307 |     locations->SetInAt(0, Location::RequiresRegister()); | 
 | 4308 |   } | 
 | 4309 |   locations->SetOut(Location::RequiresRegister()); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4310 | } | 
 | 4311 |  | 
 | 4312 | void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { | 
| Calin Juravle | 98893e1 | 2015-10-02 21:05:03 +0100 | [diff] [blame] | 4313 |   if (cls->NeedsAccessCheck()) { | 
 | 4314 |     codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex()); | 
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 4315 |     codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc()); | 
| Roland Levillain | 888d067 | 2015-11-23 18:53:50 +0000 | [diff] [blame] | 4316 |     CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>(); | 
| Calin Juravle | 580b609 | 2015-10-06 17:35:58 +0100 | [diff] [blame] | 4317 |     return; | 
 | 4318 |   } | 
 | 4319 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 4320 |   Location out_loc = cls->GetLocations()->Out(); | 
| Calin Juravle | 580b609 | 2015-10-06 17:35:58 +0100 | [diff] [blame] | 4321 |   Register out = OutputRegister(cls); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4322 |  | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4323 |   const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4324 |   bool generate_null_check = false; | 
 | 4325 |   switch (cls->GetLoadKind()) { | 
 | 4326 |     case HLoadClass::LoadKind::kReferrersClass: { | 
 | 4327 |       DCHECK(!cls->CanCallRuntime()); | 
 | 4328 |       DCHECK(!cls->MustGenerateClinitCheck()); | 
 | 4329 |       // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_ | 
 | 4330 |       Register current_method = InputRegisterAt(cls, 0); | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4331 |       GenerateGcRootFieldLoad(cls, | 
 | 4332 |                               out_loc, | 
 | 4333 |                               current_method, | 
 | 4334 |                               ArtMethod::DeclaringClassOffset().Int32Value(), | 
| Roland Levillain | 00468f3 | 2016-10-27 18:02:48 +0100 | [diff] [blame] | 4335 |                               /* fixup_label */ nullptr, | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4336 |                               requires_read_barrier); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4337 |       break; | 
 | 4338 |     } | 
 | 4339 |     case HLoadClass::LoadKind::kBootImageLinkTimeAddress: | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4340 |       DCHECK(!requires_read_barrier); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4341 |       __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(), | 
 | 4342 |                                                             cls->GetTypeIndex())); | 
 | 4343 |       break; | 
 | 4344 |     case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: { | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4345 |       DCHECK(!requires_read_barrier); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4346 |       // Add ADRP with its PC-relative type patch. | 
 | 4347 |       const DexFile& dex_file = cls->GetDexFile(); | 
 | 4348 |       uint32_t type_index = cls->GetTypeIndex(); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4349 |       vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4350 |       codegen_->EmitAdrpPlaceholder(adrp_label, out.X()); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4351 |       // Add ADD with its PC-relative type patch. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4352 |       vixl::aarch64::Label* add_label = | 
 | 4353 |           codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4354 |       codegen_->EmitAddPlaceholder(add_label, out.X(), out.X()); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4355 |       break; | 
 | 4356 |     } | 
 | 4357 |     case HLoadClass::LoadKind::kBootImageAddress: { | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4358 |       DCHECK(!requires_read_barrier); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4359 |       DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress())); | 
 | 4360 |       __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress())); | 
 | 4361 |       break; | 
 | 4362 |     } | 
 | 4363 |     case HLoadClass::LoadKind::kDexCacheAddress: { | 
 | 4364 |       DCHECK_NE(cls->GetAddress(), 0u); | 
 | 4365 |       // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads | 
 | 4366 |       // that gives a 16KiB range. To try and reduce the number of literals if we load | 
 | 4367 |       // multiple types, simply split the dex cache address to a 16KiB aligned base | 
 | 4368 |       // loaded from a literal and the remaining offset embedded in the load. | 
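 |  |       // For example (illustrative address only): the low 14 bits of 0x71234568 | 
 |  |       // give offset == 0x568, leaving base_address == 0x71234000 as the single | 
 |  |       // shared literal; the 32-bit LDR encodes the offset as the scaled | 
 |  |       // immediate 0x568 / 4 == 0x15A. | 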
 | 4369 |       static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes."); | 
 | 4370 |       DCHECK_ALIGNED(cls->GetAddress(), 4u); | 
 | 4371 |       constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2; | 
 | 4372 |       uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits); | 
 | 4373 |       uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits); | 
 | 4374 |       __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address)); | 
 | 4375 |       // /* GcRoot<mirror::Class> */ out = *(base_address + offset) | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4376 |       GenerateGcRootFieldLoad(cls, | 
 | 4377 |                               out_loc, | 
 | 4378 |                               out.X(), | 
 | 4379 |                               offset, | 
| Roland Levillain | 00468f3 | 2016-10-27 18:02:48 +0100 | [diff] [blame] | 4380 |                               /* fixup_label */ nullptr, | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4381 |                               requires_read_barrier); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4382 |       generate_null_check = !cls->IsInDexCache(); | 
 | 4383 |       break; | 
 | 4384 |     } | 
 | 4385 |     case HLoadClass::LoadKind::kDexCachePcRelative: { | 
 | 4386 |       // Add ADRP with its PC-relative DexCache access patch. | 
 | 4387 |       const DexFile& dex_file = cls->GetDexFile(); | 
 | 4388 |       uint32_t element_offset = cls->GetDexCacheElementOffset(); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4389 |       vixl::aarch64::Label* adrp_label = | 
 | 4390 |           codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4391 |       codegen_->EmitAdrpPlaceholder(adrp_label, out.X()); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4392 |       // Add LDR with its PC-relative DexCache access patch. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4393 |       vixl::aarch64::Label* ldr_label = | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4394 |           codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label); | 
 | 4395 |       // /* GcRoot<mirror::Class> */ out = *(base_address + offset)  /* PC-relative */ | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4396 |       GenerateGcRootFieldLoad(cls, | 
 | 4397 |                               out_loc, | 
 | 4398 |                               out.X(), | 
 | 4399 |                               /* offset placeholder */ 0, | 
 | 4400 |                               ldr_label, | 
 | 4401 |                               requires_read_barrier); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4402 |       generate_null_check = !cls->IsInDexCache(); | 
 | 4403 |       break; | 
 | 4404 |     } | 
 | 4405 |     case HLoadClass::LoadKind::kDexCacheViaMethod: { | 
 | 4406 |       MemberOffset resolved_types_offset = | 
 | 4407 |           ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize); | 
 | 4408 |       // /* GcRoot<mirror::Class>[] */ out = | 
 | 4409 |       //        current_method.ptr_sized_fields_->dex_cache_resolved_types_ | 
 | 4410 |       Register current_method = InputRegisterAt(cls, 0); | 
 | 4411 |       __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value())); | 
 | 4412 |       // /* GcRoot<mirror::Class> */ out = out[type_index] | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4413 |       GenerateGcRootFieldLoad(cls, | 
 | 4414 |                               out_loc, | 
 | 4415 |                               out.X(), | 
 | 4416 |                               CodeGenerator::GetCacheOffset(cls->GetTypeIndex()), | 
| Roland Levillain | 00468f3 | 2016-10-27 18:02:48 +0100 | [diff] [blame] | 4417 |                               /* fixup_label */ nullptr, | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 4418 |                               requires_read_barrier); | 
| Vladimir Marko | dbb7f5b | 2016-03-30 13:23:58 +0100 | [diff] [blame] | 4419 |       generate_null_check = !cls->IsInDexCache(); | 
 | 4420 |       break; | 
 | 4421 |     } | 
 | 4422 |   } | 
 | 4423 |  | 
 | 4424 |   if (generate_null_check || cls->MustGenerateClinitCheck()) { | 
 | 4425 |     DCHECK(cls->CanCallRuntime()); | 
 | 4426 |     SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64( | 
 | 4427 |         cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck()); | 
 | 4428 |     codegen_->AddSlowPath(slow_path); | 
 | 4429 |     if (generate_null_check) { | 
 | 4430 |       __ Cbz(out, slow_path->GetEntryLabel()); | 
 | 4431 |     } | 
 | 4432 |     if (cls->MustGenerateClinitCheck()) { | 
 | 4433 |       GenerateClassInitializationCheck(slow_path, out); | 
 | 4434 |     } else { | 
 | 4435 |       __ Bind(slow_path->GetExitLabel()); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4436 |     } | 
 | 4437 |   } | 
 | 4438 | } | 
 | 4439 |  | 
| David Brazdil | cb1c055 | 2015-08-04 16:22:25 +0100 | [diff] [blame] | 4440 | static MemOperand GetExceptionTlsAddress() { | 
| Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 4441 |   return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value()); | 
| David Brazdil | cb1c055 | 2015-08-04 16:22:25 +0100 | [diff] [blame] | 4442 | } | 
 | 4443 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4444 | void LocationsBuilderARM64::VisitLoadException(HLoadException* load) { | 
 | 4445 |   LocationSummary* locations = | 
 | 4446 |       new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall); | 
 | 4447 |   locations->SetOut(Location::RequiresRegister()); | 
 | 4448 | } | 
 | 4449 |  | 
 | 4450 | void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) { | 
| David Brazdil | cb1c055 | 2015-08-04 16:22:25 +0100 | [diff] [blame] | 4451 |   __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress()); | 
 | 4452 | } | 
 | 4453 |  | 
 | 4454 | void LocationsBuilderARM64::VisitClearException(HClearException* clear) { | 
 | 4455 |   new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall); | 
 | 4456 | } | 
 | 4457 |  | 
 | 4458 | void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) { | 
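 |  |   // Storing the zero register writes null, clearing the Thread's pending exception. | 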
 | 4459 |   __ Str(wzr, GetExceptionTlsAddress()); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4460 | } | 
 | 4461 |  | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4462 | HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind( | 
 | 4463 |     HLoadString::LoadKind desired_string_load_kind) { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4464 |   switch (desired_string_load_kind) { | 
 | 4465 |     case HLoadString::LoadKind::kBootImageLinkTimeAddress: | 
 | 4466 |       DCHECK(!GetCompilerOptions().GetCompilePic()); | 
 | 4467 |       break; | 
 | 4468 |     case HLoadString::LoadKind::kBootImageLinkTimePcRelative: | 
 | 4469 |       DCHECK(GetCompilerOptions().GetCompilePic()); | 
 | 4470 |       break; | 
 | 4471 |     case HLoadString::LoadKind::kBootImageAddress: | 
 | 4472 |       break; | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4473 |     case HLoadString::LoadKind::kBssEntry: | 
| Calin Juravle | ffc8707 | 2016-04-20 14:22:09 +0100 | [diff] [blame] | 4474 |       DCHECK(!Runtime::Current()->UseJitCompilation()); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4475 |       break; | 
 | 4476 |     case HLoadString::LoadKind::kDexCacheViaMethod: | 
 | 4477 |       break; | 
| Nicolas Geoffray | ac3ebc3 | 2016-10-05 13:13:50 +0100 | [diff] [blame] | 4478 |     case HLoadString::LoadKind::kJitTableAddress: | 
 | 4479 |       DCHECK(Runtime::Current()->UseJitCompilation()); | 
| Nicolas Geoffray | 997d121 | 2016-11-09 10:36:29 +0000 | [diff] [blame] | 4480 |       break; | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4481 |   } | 
 | 4482 |   return desired_string_load_kind; | 
 | 4483 | } | 
 | 4484 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4485 | void LocationsBuilderARM64::VisitLoadString(HLoadString* load) { | 
| Christina Wadsworth | 1fe89ea | 2016-08-31 16:14:38 -0700 | [diff] [blame] | 4486 |   LocationSummary::CallKind call_kind = load->NeedsEnvironment() | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4487 |       ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) | 
 | 4488 |           ? LocationSummary::kCallOnMainOnly | 
 | 4489 |           : LocationSummary::kCallOnSlowPath) | 
| Nicolas Geoffray | 917d016 | 2015-11-24 18:25:35 +0000 | [diff] [blame] | 4490 |       : LocationSummary::kNoCall; | 
 | 4491 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4492 |   if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) { | 
| Christina Wadsworth | 1fe89ea | 2016-08-31 16:14:38 -0700 | [diff] [blame] | 4493 |     InvokeRuntimeCallingConvention calling_convention; | 
 | 4494 |     locations->SetOut(calling_convention.GetReturnLocation(load->GetType())); | 
 | 4495 |   } else { | 
 | 4496 |     locations->SetOut(Location::RequiresRegister()); | 
| Vladimir Marko | 94ce9c2 | 2016-09-30 14:50:51 +0100 | [diff] [blame] | 4497 |     if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) { | 
 | 4498 |       if (!kUseReadBarrier || kUseBakerReadBarrier) { | 
 | 4499 |         // Rely on the pResolveString and/or marking entry points to save everything, including temps. | 
 | 4500 |         RegisterSet caller_saves = RegisterSet::Empty(); | 
 | 4501 |         InvokeRuntimeCallingConvention calling_convention; | 
 | 4502 |         caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode())); | 
 | 4503 |         DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), | 
 | 4504 |                   RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot), | 
 | 4505 |                                Primitive::kPrimNot).GetCode()); | 
 | 4506 |         locations->SetCustomSlowPathCallerSaves(caller_saves); | 
 | 4507 |       } else { | 
 | 4508 |         // For non-Baker read barrier we have a temp-clobbering call. | 
 | 4509 |       } | 
 | 4510 |     } | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4511 |   } | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4512 | } | 
 | 4513 |  | 
 | 4514 | void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4515 |   Register out = OutputRegister(load); | 
| Nicolas Geoffray | 997d121 | 2016-11-09 10:36:29 +0000 | [diff] [blame] | 4516 |   Location out_loc = load->GetLocations()->Out(); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 4517 |  | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4518 |   switch (load->GetLoadKind()) { | 
 | 4519 |     case HLoadString::LoadKind::kBootImageLinkTimeAddress: | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4520 |       __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(), | 
 | 4521 |                                                               load->GetStringIndex())); | 
 | 4522 |       return;  // No dex cache slow path. | 
 | 4523 |     case HLoadString::LoadKind::kBootImageLinkTimePcRelative: { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4524 |       // Add ADRP with its PC-relative String patch. | 
 | 4525 |       const DexFile& dex_file = load->GetDexFile(); | 
 | 4526 |       uint32_t string_index = load->GetStringIndex(); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4527 |       DCHECK(codegen_->GetCompilerOptions().IsBootImage()); | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4528 |       vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4529 |       codegen_->EmitAdrpPlaceholder(adrp_label, out.X()); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4530 |       // Add ADD with its PC-relative String patch. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4531 |       vixl::aarch64::Label* add_label = | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4532 |           codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4533 |       codegen_->EmitAddPlaceholder(add_label, out.X(), out.X()); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4534 |       return;  // No dex cache slow path. | 
 | 4535 |     } | 
 | 4536 |     case HLoadString::LoadKind::kBootImageAddress: { | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4537 |       DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress())); | 
 | 4538 |       __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress())); | 
 | 4539 |       return;  // No dex cache slow path. | 
 | 4540 |     } | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4541 |     case HLoadString::LoadKind::kBssEntry: { | 
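 |  |       // Rough sketch of the sequence emitted below (the immediates are filled in | 
 |  |       // later by the linker patches): | 
 |  |       //   adrp temp, <page of the .bss string entry> | 
 |  |       //   ldr  out, [temp, <page offset>]        // GcRoot<mirror::String> | 
 |  |       //   cbz  out, <string resolution slow path> | 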
 | 4542 |       // Add ADRP with its PC-relative String .bss entry patch. | 
 | 4543 |       const DexFile& dex_file = load->GetDexFile(); | 
 | 4544 |       uint32_t string_index = load->GetStringIndex(); | 
 | 4545 |       DCHECK(!codegen_->GetCompilerOptions().IsBootImage()); | 
| Vladimir Marko | 94ce9c2 | 2016-09-30 14:50:51 +0100 | [diff] [blame] | 4546 |       UseScratchRegisterScope temps(codegen_->GetVIXLAssembler()); | 
 | 4547 |       Register temp = temps.AcquireX(); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4548 |       vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index); | 
| Vladimir Marko | 94ce9c2 | 2016-09-30 14:50:51 +0100 | [diff] [blame] | 4549 |       codegen_->EmitAdrpPlaceholder(adrp_label, temp); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4550 |       // Add LDR with its PC-relative String patch. | 
 | 4551 |       vixl::aarch64::Label* ldr_label = | 
 | 4552 |           codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label); | 
| Nicolas Geoffray | 997d121 | 2016-11-09 10:36:29 +0000 | [diff] [blame] | 4553 |       // /* GcRoot<mirror::String> */ out = *(base_address + offset)  /* PC-relative */ | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4554 |       GenerateGcRootFieldLoad(load, | 
| Nicolas Geoffray | 997d121 | 2016-11-09 10:36:29 +0000 | [diff] [blame] | 4555 |                               out_loc, | 
| Vladimir Marko | 94ce9c2 | 2016-09-30 14:50:51 +0100 | [diff] [blame] | 4556 |                               temp, | 
| Roland Levillain | 00468f3 | 2016-10-27 18:02:48 +0100 | [diff] [blame] | 4557 |                               /* offset placeholder */ 0u, | 
 | 4558 |                               ldr_label, | 
 | 4559 |                               kEmitCompilerReadBarrier); | 
| Vladimir Marko | 94ce9c2 | 2016-09-30 14:50:51 +0100 | [diff] [blame] | 4560 |       SlowPathCodeARM64* slow_path = | 
 | 4561 |           new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load, temp, adrp_label); | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 4562 |       codegen_->AddSlowPath(slow_path); | 
 | 4563 |       __ Cbz(out.X(), slow_path->GetEntryLabel()); | 
 | 4564 |       __ Bind(slow_path->GetExitLabel()); | 
 | 4565 |       return; | 
 | 4566 |     } | 
| Nicolas Geoffray | 997d121 | 2016-11-09 10:36:29 +0000 | [diff] [blame] | 4567 |     case HLoadString::LoadKind::kJitTableAddress: { | 
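 |  |       // Load the address of the string's GcRoot in the JIT table from a deduplicated | 
 |  |       // literal, then read the root itself from that address (offset 0) below. | 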
 | 4568 |       __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(), | 
 | 4569 |                                                         load->GetStringIndex())); | 
 | 4570 |       GenerateGcRootFieldLoad(load, | 
 | 4571 |                               out_loc, | 
 | 4572 |                               out.X(), | 
 | 4573 |                               /* offset */ 0, | 
 | 4574 |                               /* fixup_label */ nullptr, | 
 | 4575 |                               kEmitCompilerReadBarrier); | 
 | 4576 |       return; | 
 | 4577 |     } | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4578 |     default: | 
| Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 4579 |       break; | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 4580 |   } | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 4581 |  | 
| Christina Wadsworth | bf44e0e | 2016-08-18 10:37:42 -0700 | [diff] [blame] | 4582 |   // TODO: Re-add the compiler code that does the string dex cache lookup. | 
| Christina Wadsworth | 1fe89ea | 2016-08-31 16:14:38 -0700 | [diff] [blame] | 4583 |   InvokeRuntimeCallingConvention calling_convention; | 
| Vladimir Marko | 94ce9c2 | 2016-09-30 14:50:51 +0100 | [diff] [blame] | 4584 |   DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode()); | 
| Christina Wadsworth | 1fe89ea | 2016-08-31 16:14:38 -0700 | [diff] [blame] | 4585 |   __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex()); | 
 | 4586 |   codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc()); | 
 | 4587 |   CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>(); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4588 | } | 
 | 4589 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4590 | void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) { | 
 | 4591 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant); | 
 | 4592 |   locations->SetOut(Location::ConstantLocation(constant)); | 
 | 4593 | } | 
 | 4594 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 4595 | void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4596 |   // Will be generated at use site. | 
 | 4597 | } | 
 | 4598 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4599 | void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) { | 
 | 4600 |   LocationSummary* locations = | 
| Serban Constantinescu | 54ff482 | 2016-07-07 18:03:19 +0100 | [diff] [blame] | 4601 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4602 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 4603 |   locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0))); | 
 | 4604 | } | 
 | 4605 |  | 
 | 4606 | void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) { | 
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 4607 |   codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject, | 
 | 4608 |                           instruction, | 
 | 4609 |                           instruction->GetDexPc()); | 
| Roland Levillain | 888d067 | 2015-11-23 18:53:50 +0000 | [diff] [blame] | 4610 |   if (instruction->IsEnter()) { | 
 | 4611 |     CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>(); | 
 | 4612 |   } else { | 
 | 4613 |     CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>(); | 
 | 4614 |   } | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4615 | } | 
 | 4616 |  | 
| Alexandre Rames | 42d641b | 2014-10-27 14:00:51 +0000 | [diff] [blame] | 4617 | void LocationsBuilderARM64::VisitMul(HMul* mul) { | 
 | 4618 |   LocationSummary* locations = | 
 | 4619 |       new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall); | 
 | 4620 |   switch (mul->GetResultType()) { | 
 | 4621 |     case Primitive::kPrimInt: | 
 | 4622 |     case Primitive::kPrimLong: | 
 | 4623 |       locations->SetInAt(0, Location::RequiresRegister()); | 
 | 4624 |       locations->SetInAt(1, Location::RequiresRegister()); | 
| Alexandre Rames | fb4e5fa | 2014-11-06 12:41:16 +0000 | [diff] [blame] | 4625 |       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
| Alexandre Rames | 42d641b | 2014-10-27 14:00:51 +0000 | [diff] [blame] | 4626 |       break; | 
 | 4627 |  | 
 | 4628 |     case Primitive::kPrimFloat: | 
 | 4629 |     case Primitive::kPrimDouble: | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 4630 |       locations->SetInAt(0, Location::RequiresFpuRegister()); | 
 | 4631 |       locations->SetInAt(1, Location::RequiresFpuRegister()); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4632 |       locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); | 
| Alexandre Rames | 42d641b | 2014-10-27 14:00:51 +0000 | [diff] [blame] | 4633 |       break; | 
 | 4634 |  | 
 | 4635 |     default: | 
 | 4636 |       LOG(FATAL) << "Unexpected mul type " << mul->GetResultType(); | 
 | 4637 |   } | 
 | 4638 | } | 
 | 4639 |  | 
 | 4640 | void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) { | 
 | 4641 |   switch (mul->GetResultType()) { | 
 | 4642 |     case Primitive::kPrimInt: | 
 | 4643 |     case Primitive::kPrimLong: | 
 | 4644 |       __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1)); | 
 | 4645 |       break; | 
 | 4646 |  | 
 | 4647 |     case Primitive::kPrimFloat: | 
 | 4648 |     case Primitive::kPrimDouble: | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 4649 |       __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1)); | 
| Alexandre Rames | 42d641b | 2014-10-27 14:00:51 +0000 | [diff] [blame] | 4650 |       break; | 
 | 4651 |  | 
 | 4652 |     default: | 
 | 4653 |       LOG(FATAL) << "Unexpected mul type " << mul->GetResultType(); | 
 | 4654 |   } | 
 | 4655 | } | 
 | 4656 |  | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4657 | void LocationsBuilderARM64::VisitNeg(HNeg* neg) { | 
 | 4658 |   LocationSummary* locations = | 
 | 4659 |       new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall); | 
 | 4660 |   switch (neg->GetResultType()) { | 
 | 4661 |     case Primitive::kPrimInt: | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4662 |     case Primitive::kPrimLong: | 
| Serban Constantinescu | 2d35d9d | 2015-02-22 22:08:01 +0000 | [diff] [blame] | 4663 |       locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg)); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4664 |       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4665 |       break; | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4666 |  | 
 | 4667 |     case Primitive::kPrimFloat: | 
 | 4668 |     case Primitive::kPrimDouble: | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4669 |       locations->SetInAt(0, Location::RequiresFpuRegister()); | 
 | 4670 |       locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4671 |       break; | 
 | 4672 |  | 
 | 4673 |     default: | 
 | 4674 |       LOG(FATAL) << "Unexpected neg type " << neg->GetResultType(); | 
 | 4675 |   } | 
 | 4676 | } | 
 | 4677 |  | 
 | 4678 | void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) { | 
 | 4679 |   switch (neg->GetResultType()) { | 
 | 4680 |     case Primitive::kPrimInt: | 
 | 4681 |     case Primitive::kPrimLong: | 
 | 4682 |       __ Neg(OutputRegister(neg), InputOperandAt(neg, 0)); | 
 | 4683 |       break; | 
 | 4684 |  | 
 | 4685 |     case Primitive::kPrimFloat: | 
 | 4686 |     case Primitive::kPrimDouble: | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4687 |       __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0)); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4688 |       break; | 
 | 4689 |  | 
 | 4690 |     default: | 
 | 4691 |       LOG(FATAL) << "Unexpected neg type " << neg->GetResultType(); | 
 | 4692 |   } | 
 | 4693 | } | 
 | 4694 |  | 
 | 4695 | void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) { | 
 | 4696 |   LocationSummary* locations = | 
| Serban Constantinescu | 54ff482 | 2016-07-07 18:03:19 +0100 | [diff] [blame] | 4697 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4698 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 4699 |   locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0))); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4700 |   locations->SetOut(LocationFrom(x0)); | 
| Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 4701 |   locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1))); | 
| Nicolas Geoffray | 69aa601 | 2015-06-09 10:34:25 +0100 | [diff] [blame] | 4702 |   locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2))); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4703 | } | 
 | 4704 |  | 
 | 4705 | void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) { | 
 | 4706 |   LocationSummary* locations = instruction->GetLocations(); | 
 | 4707 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 4708 |   Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt); | 
 | 4709 |   DCHECK(type_index.Is(w0)); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4710 |   __ Mov(type_index, instruction->GetTypeIndex()); | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 4711 |   // Note: if heap poisoning is enabled, the entry point takes care | 
 | 4712 |   // of poisoning the reference. | 
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 4713 |   codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc()); | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 4714 |   CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>(); | 
| Alexandre Rames | fc19de8 | 2014-11-07 17:13:31 +0000 | [diff] [blame] | 4715 | } | 
 | 4716 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4717 | void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) { | 
 | 4718 |   LocationSummary* locations = | 
| Serban Constantinescu | 54ff482 | 2016-07-07 18:03:19 +0100 | [diff] [blame] | 4719 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4720 |   InvokeRuntimeCallingConvention calling_convention; | 
| David Brazdil | 6de1938 | 2016-01-08 17:37:10 +0000 | [diff] [blame] | 4721 |   if (instruction->IsStringAlloc()) { | 
 | 4722 |     locations->AddTemp(LocationFrom(kArtMethodRegister)); | 
 | 4723 |   } else { | 
 | 4724 |     locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0))); | 
 | 4725 |     locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1))); | 
 | 4726 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4727 |   locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot)); | 
 | 4728 | } | 
 | 4729 |  | 
 | 4730 | void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) { | 
| Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 4731 |   // Note: if heap poisoning is enabled, the entry point takes care | 
 | 4732 |   // of poisoning the reference. | 
| David Brazdil | 6de1938 | 2016-01-08 17:37:10 +0000 | [diff] [blame] | 4733 |   if (instruction->IsStringAlloc()) { | 
 | 4734 |     // String is allocated through StringFactory. Call NewEmptyString entry point. | 
 | 4735 |     Location temp = instruction->GetLocations()->GetTemp(0); | 
| Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 4736 |     MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize); | 
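 |  |     // The pNewEmptyString entrypoint slot holds an ArtMethod*; load its quick-compiled | 
 |  |     // code entry point into lr and call it directly. | 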
| David Brazdil | 6de1938 | 2016-01-08 17:37:10 +0000 | [diff] [blame] | 4737 |     __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString))); | 
 | 4738 |     __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value())); | 
 | 4739 |     __ Blr(lr); | 
 | 4740 |     codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); | 
 | 4741 |   } else { | 
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 4742 |     codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc()); | 
| David Brazdil | 6de1938 | 2016-01-08 17:37:10 +0000 | [diff] [blame] | 4743 |     CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>(); | 
 | 4744 |   } | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4745 | } | 
 | 4746 |  | 
 | 4747 | void LocationsBuilderARM64::VisitNot(HNot* instruction) { | 
 | 4748 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction); | 
| Alexandre Rames | 4e59651 | 2014-11-07 15:56:50 +0000 | [diff] [blame] | 4749 |   locations->SetInAt(0, Location::RequiresRegister()); | 
| Alexandre Rames | fb4e5fa | 2014-11-06 12:41:16 +0000 | [diff] [blame] | 4750 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4751 | } | 
 | 4752 |  | 
 | 4753 | void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) { | 
| Nicolas Geoffray | d8ef2e9 | 2015-02-24 16:02:06 +0000 | [diff] [blame] | 4754 |   switch (instruction->GetResultType()) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4755 |     case Primitive::kPrimInt: | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4756 |     case Primitive::kPrimLong: | 
| Roland Levillain | 55dcfb5 | 2014-10-24 18:09:09 +0100 | [diff] [blame] | 4757 |       __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0)); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4758 |       break; | 
 | 4759 |  | 
 | 4760 |     default: | 
 | 4761 |       LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType(); | 
 | 4762 |   } | 
 | 4763 | } | 
 | 4764 |  | 
| David Brazdil | 66d126e | 2015-04-03 16:02:44 +0100 | [diff] [blame] | 4765 | void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) { | 
 | 4766 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction); | 
 | 4767 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 4768 |   locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 4769 | } | 
 | 4770 |  | 
 | 4771 | void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) { | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 4772 |   __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1)); | 
| David Brazdil | 66d126e | 2015-04-03 16:02:44 +0100 | [diff] [blame] | 4773 | } | 
 | 4774 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4775 | void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) { | 
| Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 4776 |   LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction); | 
 | 4777 |   locations->SetInAt(0, Location::RequiresRegister()); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4778 | } | 
 | 4779 |  | 
| Calin Juravle | 2ae4818 | 2016-03-16 14:05:09 +0000 | [diff] [blame] | 4780 | void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) { | 
 | 4781 |   if (CanMoveNullCheckToUser(instruction)) { | 
| Calin Juravle | 77520bc | 2015-01-12 18:45:46 +0000 | [diff] [blame] | 4782 |     return; | 
 | 4783 |   } | 
| Calin Juravle | cd6dffe | 2015-01-08 17:35:35 +0000 | [diff] [blame] | 4784 |  | 
| Alexandre Rames | d921d64 | 2015-04-16 15:07:16 +0100 | [diff] [blame] | 4785 |   BlockPoolsScope block_pools(GetVIXLAssembler()); | 
 | 4786 |   Location obj = instruction->GetLocations()->InAt(0); | 
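 |  |   // Loading from offset 0 of the object faults if the reference is null; the fault | 
 |  |   // handler uses the PC recorded below to raise the NullPointerException. | 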
| Calin Juravle | cd6dffe | 2015-01-08 17:35:35 +0000 | [diff] [blame] | 4787 |   __ Ldr(wzr, HeapOperandFrom(obj, Offset(0))); | 
| Calin Juravle | 2ae4818 | 2016-03-16 14:05:09 +0000 | [diff] [blame] | 4788 |   RecordPcInfo(instruction, instruction->GetDexPc()); | 
| Calin Juravle | cd6dffe | 2015-01-08 17:35:35 +0000 | [diff] [blame] | 4789 | } | 
 | 4790 |  | 
| Calin Juravle | 2ae4818 | 2016-03-16 14:05:09 +0000 | [diff] [blame] | 4791 | void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4792 |   SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction); | 
| Calin Juravle | 2ae4818 | 2016-03-16 14:05:09 +0000 | [diff] [blame] | 4793 |   AddSlowPath(slow_path); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4794 |  | 
 | 4795 |   LocationSummary* locations = instruction->GetLocations(); | 
 | 4796 |   Location obj = locations->InAt(0); | 
| Calin Juravle | 77520bc | 2015-01-12 18:45:46 +0000 | [diff] [blame] | 4797 |  | 
 | 4798 |   __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel()); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4799 | } | 
 | 4800 |  | 
| Calin Juravle | cd6dffe | 2015-01-08 17:35:35 +0000 | [diff] [blame] | 4801 | void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) { | 
| Calin Juravle | 2ae4818 | 2016-03-16 14:05:09 +0000 | [diff] [blame] | 4802 |   codegen_->GenerateNullCheck(instruction); | 
| Calin Juravle | cd6dffe | 2015-01-08 17:35:35 +0000 | [diff] [blame] | 4803 | } | 
 | 4804 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4805 | void LocationsBuilderARM64::VisitOr(HOr* instruction) { | 
 | 4806 |   HandleBinaryOp(instruction); | 
 | 4807 | } | 
 | 4808 |  | 
 | 4809 | void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) { | 
 | 4810 |   HandleBinaryOp(instruction); | 
 | 4811 | } | 
 | 4812 |  | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 4813 | void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) { | 
 | 4814 |   LOG(FATAL) << "Unreachable"; | 
 | 4815 | } | 
 | 4816 |  | 
 | 4817 | void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) { | 
 | 4818 |   codegen_->GetMoveResolver()->EmitNativeCode(instruction); | 
 | 4819 | } | 
 | 4820 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4821 | void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) { | 
 | 4822 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction); | 
 | 4823 |   Location location = parameter_visitor_.GetNextLocation(instruction->GetType()); | 
 | 4824 |   if (location.IsStackSlot()) { | 
 | 4825 |     location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); | 
 | 4826 |   } else if (location.IsDoubleStackSlot()) { | 
 | 4827 |     location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); | 
 | 4828 |   } | 
 | 4829 |   locations->SetOut(location); | 
 | 4830 | } | 
 | 4831 |  | 
| Nicolas Geoffray | 76b1e17 | 2015-05-27 17:18:33 +0100 | [diff] [blame] | 4832 | void InstructionCodeGeneratorARM64::VisitParameterValue( | 
 | 4833 |     HParameterValue* instruction ATTRIBUTE_UNUSED) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4834 |   // Nothing to do, the parameter is already at its location. | 
| Nicolas Geoffray | 76b1e17 | 2015-05-27 17:18:33 +0100 | [diff] [blame] | 4835 | } | 
 | 4836 |  | 
 | 4837 | void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) { | 
 | 4838 |   LocationSummary* locations = | 
 | 4839 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); | 
| Nicolas Geoffray | 38207af | 2015-06-01 15:46:22 +0100 | [diff] [blame] | 4840 |   locations->SetOut(LocationFrom(kArtMethodRegister)); | 
| Nicolas Geoffray | 76b1e17 | 2015-05-27 17:18:33 +0100 | [diff] [blame] | 4841 | } | 
 | 4842 |  | 
 | 4843 | void InstructionCodeGeneratorARM64::VisitCurrentMethod( | 
 | 4844 |     HCurrentMethod* instruction ATTRIBUTE_UNUSED) { | 
 | 4845 |   // Nothing to do, the method is already at its location. | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4846 | } | 
 | 4847 |  | 
 | 4848 | void LocationsBuilderARM64::VisitPhi(HPhi* instruction) { | 
 | 4849 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction); | 
| Vladimir Marko | 372f10e | 2016-05-17 16:30:10 +0100 | [diff] [blame] | 4850 |   for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4851 |     locations->SetInAt(i, Location::Any()); | 
 | 4852 |   } | 
 | 4853 |   locations->SetOut(Location::Any()); | 
 | 4854 | } | 
 | 4855 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 4856 | void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4857 |   LOG(FATAL) << "Unreachable"; | 
 | 4858 | } | 
 | 4859 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4860 | void LocationsBuilderARM64::VisitRem(HRem* rem) { | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 4861 |   Primitive::Type type = rem->GetResultType(); | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 4862 |   LocationSummary::CallKind call_kind = | 
| Serban Constantinescu | 54ff482 | 2016-07-07 18:03:19 +0100 | [diff] [blame] | 4863 |       Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly | 
 | 4864 |                                            : LocationSummary::kNoCall; | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 4865 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind); | 
 | 4866 |  | 
 | 4867 |   switch (type) { | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4868 |     case Primitive::kPrimInt: | 
 | 4869 |     case Primitive::kPrimLong: | 
 | 4870 |       locations->SetInAt(0, Location::RequiresRegister()); | 
| Zheng Xu | c666710 | 2015-05-15 16:08:45 +0800 | [diff] [blame] | 4871 |       locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1))); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4872 |       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 4873 |       break; | 
 | 4874 |  | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 4875 |     case Primitive::kPrimFloat: | 
 | 4876 |     case Primitive::kPrimDouble: { | 
 | 4877 |       InvokeRuntimeCallingConvention calling_convention; | 
 | 4878 |       locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0))); | 
 | 4879 |       locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1))); | 
 | 4880 |       locations->SetOut(calling_convention.GetReturnLocation(type)); | 
 | 4881 |  | 
 | 4882 |       break; | 
 | 4883 |     } | 
 | 4884 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4885 |     default: | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 4886 |       LOG(FATAL) << "Unexpected rem type " << type; | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4887 |   } | 
 | 4888 | } | 
 | 4889 |  | 
 | 4890 | void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) { | 
 | 4891 |   Primitive::Type type = rem->GetResultType(); | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 4892 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4893 |   switch (type) { | 
 | 4894 |     case Primitive::kPrimInt: | 
 | 4895 |     case Primitive::kPrimLong: { | 
| Zheng Xu | c666710 | 2015-05-15 16:08:45 +0800 | [diff] [blame] | 4896 |       GenerateDivRemIntegral(rem); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4897 |       break; | 
 | 4898 |     } | 
 | 4899 |  | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 4900 |     case Primitive::kPrimFloat: | 
 | 4901 |     case Primitive::kPrimDouble: { | 
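 |  |       // AArch64 has no floating-point remainder instruction, so this is implemented | 
 |  |       // as a runtime call to fmodf()/fmod(). | 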
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 4902 |       QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod; | 
 | 4903 |       codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc()); | 
| Roland Levillain | 888d067 | 2015-11-23 18:53:50 +0000 | [diff] [blame] | 4904 |       if (type == Primitive::kPrimFloat) { | 
 | 4905 |         CheckEntrypointTypes<kQuickFmodf, float, float, float>(); | 
 | 4906 |       } else { | 
 | 4907 |         CheckEntrypointTypes<kQuickFmod, double, double, double>(); | 
 | 4908 |       } | 
| Serban Constantinescu | 02d81cc | 2015-01-05 16:08:49 +0000 | [diff] [blame] | 4909 |       break; | 
 | 4910 |     } | 
 | 4911 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4912 |     default: | 
 | 4913 |       LOG(FATAL) << "Unexpected rem type " << type; | 
| Vladimir Marko | 351dddf | 2015-12-11 16:34:46 +0000 | [diff] [blame] | 4914 |       UNREACHABLE(); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4915 |   } | 
 | 4916 | } | 
 | 4917 |  | 
| Calin Juravle | 27df758 | 2015-04-17 19:12:31 +0100 | [diff] [blame] | 4918 | void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) { | 
 | 4919 |   memory_barrier->SetLocations(nullptr); | 
 | 4920 | } | 
 | 4921 |  | 
 | 4922 | void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 4923 |   codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind()); | 
| Calin Juravle | 27df758 | 2015-04-17 19:12:31 +0100 | [diff] [blame] | 4924 | } | 
 | 4925 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4926 | void LocationsBuilderARM64::VisitReturn(HReturn* instruction) { | 
 | 4927 |   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction); | 
 | 4928 |   Primitive::Type return_type = instruction->InputAt(0)->GetType(); | 
| Alexandre Rames | a89086e | 2014-11-07 17:13:25 +0000 | [diff] [blame] | 4929 |   locations->SetInAt(0, ARM64ReturnLocation(return_type)); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4930 | } | 
 | 4931 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 4932 | void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4933 |   codegen_->GenerateFrameExit(); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4934 | } | 
 | 4935 |  | 
 | 4936 | void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) { | 
 | 4937 |   instruction->SetLocations(nullptr); | 
 | 4938 | } | 
 | 4939 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 4940 | void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) { | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4941 |   codegen_->GenerateFrameExit(); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4942 | } | 
 | 4943 |  | 
| Scott Wakeling | 40a04bf | 2015-12-11 09:50:36 +0000 | [diff] [blame] | 4944 | void LocationsBuilderARM64::VisitRor(HRor* ror) { | 
 | 4945 |   HandleBinaryOp(ror); | 
 | 4946 | } | 
 | 4947 |  | 
 | 4948 | void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) { | 
 | 4949 |   HandleBinaryOp(ror); | 
 | 4950 | } | 
 | 4951 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 4952 | void LocationsBuilderARM64::VisitShl(HShl* shl) { | 
 | 4953 |   HandleShift(shl); | 
 | 4954 | } | 
 | 4955 |  | 
 | 4956 | void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) { | 
 | 4957 |   HandleShift(shl); | 
 | 4958 | } | 
 | 4959 |  | 
 | 4960 | void LocationsBuilderARM64::VisitShr(HShr* shr) { | 
 | 4961 |   HandleShift(shr); | 
 | 4962 | } | 
 | 4963 |  | 
 | 4964 | void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) { | 
 | 4965 |   HandleShift(shr); | 
 | 4966 | } | 
 | 4967 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4968 | void LocationsBuilderARM64::VisitSub(HSub* instruction) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4969 |   HandleBinaryOp(instruction); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4970 | } | 
 | 4971 |  | 
 | 4972 | void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4973 |   HandleBinaryOp(instruction); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4974 | } | 
 | 4975 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4976 | void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) { | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 4977 |   HandleFieldGet(instruction); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4978 | } | 
 | 4979 |  | 
 | 4980 | void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) { | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 4981 |   HandleFieldGet(instruction, instruction->GetFieldInfo()); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4982 | } | 
 | 4983 |  | 
 | 4984 | void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) { | 
| Alexandre Rames | 09a9996 | 2015-04-15 11:47:56 +0100 | [diff] [blame] | 4985 |   HandleFieldSet(instruction); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4986 | } | 
 | 4987 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 4988 | void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) { | 
| Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 4989 |   HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull()); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 4990 | } | 
 | 4991 |  | 
| Calin Juravle | e460d1d | 2015-09-29 04:52:17 +0100 | [diff] [blame] | 4992 | void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet( | 
 | 4993 |     HUnresolvedInstanceFieldGet* instruction) { | 
 | 4994 |   FieldAccessCallingConventionARM64 calling_convention; | 
 | 4995 |   codegen_->CreateUnresolvedFieldLocationSummary( | 
 | 4996 |       instruction, instruction->GetFieldType(), calling_convention); | 
 | 4997 | } | 
 | 4998 |  | 
 | 4999 | void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet( | 
 | 5000 |     HUnresolvedInstanceFieldGet* instruction) { | 
 | 5001 |   FieldAccessCallingConventionARM64 calling_convention; | 
 | 5002 |   codegen_->GenerateUnresolvedFieldAccess(instruction, | 
 | 5003 |                                           instruction->GetFieldType(), | 
 | 5004 |                                           instruction->GetFieldIndex(), | 
 | 5005 |                                           instruction->GetDexPc(), | 
 | 5006 |                                           calling_convention); | 
 | 5007 | } | 
 | 5008 |  | 
 | 5009 | void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet( | 
 | 5010 |     HUnresolvedInstanceFieldSet* instruction) { | 
 | 5011 |   FieldAccessCallingConventionARM64 calling_convention; | 
 | 5012 |   codegen_->CreateUnresolvedFieldLocationSummary( | 
 | 5013 |       instruction, instruction->GetFieldType(), calling_convention); | 
 | 5014 | } | 
 | 5015 |  | 
 | 5016 | void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet( | 
 | 5017 |     HUnresolvedInstanceFieldSet* instruction) { | 
 | 5018 |   FieldAccessCallingConventionARM64 calling_convention; | 
 | 5019 |   codegen_->GenerateUnresolvedFieldAccess(instruction, | 
 | 5020 |                                           instruction->GetFieldType(), | 
 | 5021 |                                           instruction->GetFieldIndex(), | 
 | 5022 |                                           instruction->GetDexPc(), | 
 | 5023 |                                           calling_convention); | 
 | 5024 | } | 
 | 5025 |  | 
 | 5026 | void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet( | 
 | 5027 |     HUnresolvedStaticFieldGet* instruction) { | 
 | 5028 |   FieldAccessCallingConventionARM64 calling_convention; | 
 | 5029 |   codegen_->CreateUnresolvedFieldLocationSummary( | 
 | 5030 |       instruction, instruction->GetFieldType(), calling_convention); | 
 | 5031 | } | 
 | 5032 |  | 
 | 5033 | void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet( | 
 | 5034 |     HUnresolvedStaticFieldGet* instruction) { | 
 | 5035 |   FieldAccessCallingConventionARM64 calling_convention; | 
 | 5036 |   codegen_->GenerateUnresolvedFieldAccess(instruction, | 
 | 5037 |                                           instruction->GetFieldType(), | 
 | 5038 |                                           instruction->GetFieldIndex(), | 
 | 5039 |                                           instruction->GetDexPc(), | 
 | 5040 |                                           calling_convention); | 
 | 5041 | } | 
 | 5042 |  | 
 | 5043 | void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet( | 
 | 5044 |     HUnresolvedStaticFieldSet* instruction) { | 
 | 5045 |   FieldAccessCallingConventionARM64 calling_convention; | 
 | 5046 |   codegen_->CreateUnresolvedFieldLocationSummary( | 
 | 5047 |       instruction, instruction->GetFieldType(), calling_convention); | 
 | 5048 | } | 
 | 5049 |  | 
 | 5050 | void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet( | 
 | 5051 |     HUnresolvedStaticFieldSet* instruction) { | 
 | 5052 |   FieldAccessCallingConventionARM64 calling_convention; | 
 | 5053 |   codegen_->GenerateUnresolvedFieldAccess(instruction, | 
 | 5054 |                                           instruction->GetFieldType(), | 
 | 5055 |                                           instruction->GetFieldIndex(), | 
 | 5056 |                                           instruction->GetDexPc(), | 
 | 5057 |                                           calling_convention); | 
 | 5058 | } | 
 | 5059 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 5060 | void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) { | 
| Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 5061 |   LocationSummary* locations = | 
 | 5062 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath); | 
| Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 5063 |   locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers. | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 5064 | } | 
 | 5065 |  | 
 | 5066 | void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) { | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 5067 |   HBasicBlock* block = instruction->GetBlock(); | 
 | 5068 |   if (block->GetLoopInformation() != nullptr) { | 
 | 5069 |     DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction); | 
 | 5070 |     // The back edge will generate the suspend check. | 
 | 5071 |     return; | 
 | 5072 |   } | 
 | 5073 |   if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) { | 
 | 5074 |     // The goto will generate the suspend check. | 
 | 5075 |     return; | 
 | 5076 |   } | 
 | 5077 |   GenerateSuspendCheck(instruction, nullptr); | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 5078 | } | 
 | 5079 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5080 | void LocationsBuilderARM64::VisitThrow(HThrow* instruction) { | 
 | 5081 |   LocationSummary* locations = | 
| Serban Constantinescu | 54ff482 | 2016-07-07 18:03:19 +0100 | [diff] [blame] | 5082 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5083 |   InvokeRuntimeCallingConvention calling_convention; | 
 | 5084 |   locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0))); | 
 | 5085 | } | 
 | 5086 |  | 
 | 5087 | void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) { | 
| Serban Constantinescu | 22f81d3 | 2016-02-18 16:06:31 +0000 | [diff] [blame] | 5088 |   codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc()); | 
| Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 5089 |   CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>(); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5090 | } | 
 | 5091 |  | 
 | 5092 | void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) { | 
 | 5093 |   LocationSummary* locations = | 
 | 5094 |       new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall); | 
 | 5095 |   Primitive::Type input_type = conversion->GetInputType(); | 
 | 5096 |   Primitive::Type result_type = conversion->GetResultType(); | 
| Nicolas Geoffray | 01fcc9e | 2014-12-01 14:16:20 +0000 | [diff] [blame] | 5097 |   DCHECK_NE(input_type, result_type); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5098 |   if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) || | 
 | 5099 |       (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) { | 
 | 5100 |     LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type; | 
 | 5101 |   } | 
 | 5102 |  | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 5103 |   if (Primitive::IsFloatingPointType(input_type)) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5104 |     locations->SetInAt(0, Location::RequiresFpuRegister()); | 
 | 5105 |   } else { | 
 | 5106 |     locations->SetInAt(0, Location::RequiresRegister()); | 
 | 5107 |   } | 
 | 5108 |  | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 5109 |   if (Primitive::IsFloatingPointType(result_type)) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5110 |     locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); | 
 | 5111 |   } else { | 
 | 5112 |     locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); | 
 | 5113 |   } | 
 | 5114 | } | 
 | 5115 |  | 
 | 5116 | void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) { | 
 | 5117 |   Primitive::Type result_type = conversion->GetResultType(); | 
 | 5118 |   Primitive::Type input_type = conversion->GetInputType(); | 
 | 5119 |  | 
 | 5120 |   DCHECK_NE(input_type, result_type); | 
 | 5121 |  | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 5122 |   if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) { | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5123 |     int result_size = Primitive::ComponentSize(result_type); | 
 | 5124 |     int input_size = Primitive::ComponentSize(input_type); | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 5125 |     int min_size = std::min(result_size, input_size); | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 5126 |     Register output = OutputRegister(conversion); | 
 | 5127 |     Register source = InputRegisterAt(conversion, 0); | 
| Alexandre Rames | 8626b74 | 2015-11-25 16:28:08 +0000 | [diff] [blame] | 5128 |     if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) { | 
| Alexandre Rames | 4dff2fd | 2015-08-20 13:36:35 +0100 | [diff] [blame] | 5129 |       // 'int' values are used directly as W registers, discarding the top | 
 | 5130 |       // bits, so we don't need to sign-extend and can just perform a move. | 
 | 5131 |       // We do not pass the `kDiscardForSameWReg` argument to force clearing the | 
 | 5132 |       // top 32 bits of the target register. We theoretically could leave those | 
 | 5133 |       // bits unchanged, but we would have to make sure that no code uses a | 
 | 5134 |       // 32-bit input value as a 64-bit value assuming that the top 32 bits are | 
 | 5135 |       // zero. | 
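 |  |       // For example, narrowing the long value 0x123456789 to an int yields 0x23456789; | 
 |  |       // the 32-bit move below performs exactly that truncation. | 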
 | 5136 |       __ Mov(output.W(), source.W()); | 
| Alexandre Rames | 8626b74 | 2015-11-25 16:28:08 +0000 | [diff] [blame] | 5137 |     } else if (result_type == Primitive::kPrimChar || | 
 | 5138 |                (input_type == Primitive::kPrimChar && input_size < result_size)) { | 
 | 5139 |       __ Ubfx(output, | 
 | 5140 |               output.IsX() ? source.X() : source.W(), | 
 | 5141 |               0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte); | 
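 |  |       // e.g. an int-to-char conversion becomes `ubfx wOut, wIn, #0, #16`, zero-extending | 
 |  |       // the low 16 bits. | 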
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5142 |     } else { | 
| Alexandre Rames | 3e69f16 | 2014-12-10 10:36:50 +0000 | [diff] [blame] | 5143 |       __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte); | 
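 |  |       // e.g. an int-to-byte conversion becomes `sbfx wOut, wIn, #0, #8`, sign-extending | 
 |  |       // the low 8 bits. | 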
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5144 |     } | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 5145 |   } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) { | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 5146 |     __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0)); | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 5147 |   } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) { | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 5148 |     CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong); | 
 | 5149 |     __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0)); | 
| Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 5150 |   } else if (Primitive::IsFloatingPointType(result_type) && | 
 | 5151 |              Primitive::IsFloatingPointType(input_type)) { | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 5152 |     __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0)); | 
 | 5153 |   } else { | 
 | 5154 |     LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type | 
 | 5155 |                 << " to " << result_type; | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5156 |   } | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 5157 | } | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5158 |  | 
| Serban Constantinescu | 02164b3 | 2014-11-13 14:05:07 +0000 | [diff] [blame] | 5159 | void LocationsBuilderARM64::VisitUShr(HUShr* ushr) { | 
 | 5160 |   HandleShift(ushr); | 
 | 5161 | } | 
 | 5162 |  | 
 | 5163 | void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) { | 
 | 5164 |   HandleShift(ushr); | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5165 | } | 
 | 5166 |  | 
 | 5167 | void LocationsBuilderARM64::VisitXor(HXor* instruction) { | 
 | 5168 |   HandleBinaryOp(instruction); | 
 | 5169 | } | 
 | 5170 |  | 
 | 5171 | void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) { | 
 | 5172 |   HandleBinaryOp(instruction); | 
 | 5173 | } | 
 | 5174 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 5175 | void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { | 
| Calin Juravle | b1498f6 | 2015-02-16 13:13:29 +0000 | [diff] [blame] | 5176 |   // Nothing to do; this should have been removed during preparation for register allocation. | 
| Calin Juravle | b1498f6 | 2015-02-16 13:13:29 +0000 | [diff] [blame] | 5177 |   LOG(FATAL) << "Unreachable"; | 
 | 5178 | } | 
 | 5179 |  | 
| Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 5180 | void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { | 
| Calin Juravle | b1498f6 | 2015-02-16 13:13:29 +0000 | [diff] [blame] | 5181 |   // Nothing to do; this should have been removed during preparation for register allocation. | 
| Calin Juravle | b1498f6 | 2015-02-16 13:13:29 +0000 | [diff] [blame] | 5182 |   LOG(FATAL) << "Unreachable"; | 
 | 5183 | } | 
 | 5184 |  | 
| Mark Mendell | fe57faa | 2015-09-18 09:26:15 -0400 | [diff] [blame] | 5185 | // Packed switch: generate cascaded compare/jumps or, for large switches, a jump table. | 
 | 5186 | void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) { | 
 | 5187 |   LocationSummary* locations = | 
 | 5188 |       new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall); | 
 | 5189 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 5190 | } | 
 | 5191 |  | 
 | 5192 | void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) { | 
 | 5193 |   int32_t lower_bound = switch_instr->GetStartValue(); | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 5194 |   uint32_t num_entries = switch_instr->GetNumEntries(); | 
| Mark Mendell | fe57faa | 2015-09-18 09:26:15 -0400 | [diff] [blame] | 5195 |   Register value_reg = InputRegisterAt(switch_instr, 0); | 
 | 5196 |   HBasicBlock* default_block = switch_instr->GetDefaultBlock(); | 
 | 5197 |  | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 5198 |   // Roughly assume at most 16 assembly instructions generated per HIR instruction on average. | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 5199 |   static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize; | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 5200 |   // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of HIRs in the | 
 | 5201 |   // graph to make sure we do not emit a jump table whose ADR target may be out of range. | 
 | 5202 |   // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR | 
 | 5203 |   // ranges and emit the tables only as required. | 
 | 5204 |   static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction; | 
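 |  |   // As a rough sanity check: with kInstructionSize == 4 bytes, this threshold | 
 |  |   // evaluates to 1 MB / 64 B = 16384 HIR instructions. | 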
| Mark Mendell | fe57faa | 2015-09-18 09:26:15 -0400 | [diff] [blame] | 5205 |  | 
| Vladimir Marko | f3e0ee2 | 2015-12-17 15:23:13 +0000 | [diff] [blame] | 5206 |   if (num_entries <= kPackedSwitchCompareJumpThreshold || | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 5207 |       // The current instruction id is an upper bound on the number of HIRs in the graph. | 
 | 5208 |       GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) { | 
 | 5209 |     // Create a series of compare/jumps. | 
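 |  |     // The emitted sequence is roughly (registers and labels are illustrative): | 
 |  |     //   subs w_tmp, w_value, #lower_bound | 
 |  |     //   b.eq case_0 | 
 |  |     //   subs w_tmp, w_tmp, #2 | 
 |  |     //   b.lo case_1 | 
 |  |     //   b.eq case_2 | 
 |  |     //   ...                // repeated for each remaining pair of cases | 
 |  |     //   cmp  w_tmp, #1     // only if a single odd case remains | 
 |  |     //   b.eq case_n | 
 |  |     //   b    default       // omitted if the default block is the fall-through | 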
| Vladimir Marko | f3e0ee2 | 2015-12-17 15:23:13 +0000 | [diff] [blame] | 5210 |     UseScratchRegisterScope temps(codegen_->GetVIXLAssembler()); | 
 | 5211 |     Register temp = temps.AcquireW(); | 
 | 5212 |     __ Subs(temp, value_reg, Operand(lower_bound)); | 
 | 5213 |  | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 5214 |     const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors(); | 
| Vladimir Marko | f3e0ee2 | 2015-12-17 15:23:13 +0000 | [diff] [blame] | 5215 |     // Jump to successors[0] if value == lower_bound. | 
 | 5216 |     __ B(eq, codegen_->GetLabelOf(successors[0])); | 
 | 5217 |     int32_t last_index = 0; | 
 | 5218 |     for (; num_entries - last_index > 2; last_index += 2) { | 
 | 5219 |       __ Subs(temp, temp, Operand(2)); | 
 | 5220 |       // Jump to successors[last_index + 1] if value < case_value[last_index + 2]. | 
 | 5221 |       __ B(lo, codegen_->GetLabelOf(successors[last_index + 1])); | 
 | 5222 |       // Jump to successors[last_index + 2] if value == case_value[last_index + 2]. | 
 | 5223 |       __ B(eq, codegen_->GetLabelOf(successors[last_index + 2])); | 
 | 5224 |     } | 
 | 5225 |     if (num_entries - last_index == 2) { | 
 | 5226 |       // The last missing case_value. | 
 | 5227 |       __ Cmp(temp, Operand(1)); | 
 | 5228 |       __ B(eq, codegen_->GetLabelOf(successors[last_index + 1])); | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 5229 |     } | 
 | 5230 |  | 
 | 5231 |     // And the default for any other value. | 
 | 5232 |     if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) { | 
 | 5233 |       __ B(codegen_->GetLabelOf(default_block)); | 
 | 5234 |     } | 
 | 5235 |   } else { | 
| Alexandre Rames | c01a664 | 2016-04-15 11:54:06 +0100 | [diff] [blame] | 5236 |     JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr); | 
| Zheng Xu | 3927c8b | 2015-11-18 17:46:25 +0800 | [diff] [blame] | 5237 |  | 
 | 5238 |     UseScratchRegisterScope temps(codegen_->GetVIXLAssembler()); | 
 | 5239 |  | 
 | 5240 |     // The instructions below require at most one blocked register. Since there are two blocked | 
 | 5241 |     // registers available, we are free to block one of them here. | 
 | 5242 |     Register temp_w = temps.AcquireW(); | 
 | 5243 |     Register index; | 
 | 5244 |     // Remove the bias. | 
 | 5245 |     if (lower_bound != 0) { | 
 | 5246 |       index = temp_w; | 
 | 5247 |       __ Sub(index, value_reg, Operand(lower_bound)); | 
 | 5248 |     } else { | 
 | 5249 |       index = value_reg; | 
 | 5250 |     } | 
 | 5251 |  | 
 | 5252 |     // Jump to the default block if the index is out of range. | 
 | 5253 |     __ Cmp(index, Operand(num_entries)); | 
 | 5254 |     __ B(hs, codegen_->GetLabelOf(default_block)); | 
 | 5255 |  | 
 | 5256 |     // In the current VIXL implementation, encoding the immediate value for Adr does not require | 
 | 5257 |     // any blocked registers, so we are free to use both VIXL blocked registers here to reduce | 
 | 5258 |     // register pressure. | 
 | 5259 |     Register table_base = temps.AcquireX(); | 
 | 5260 |     // Load jump offset from the table. | 
 | 5261 |     __ Adr(table_base, jump_table->GetTableStartLabel()); | 
 | 5262 |     Register jump_offset = temp_w; | 
 | 5263 |     __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2)); | 
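 |  |     // Each table entry is a signed 32-bit offset relative to table_base: the UXTW #2 | 
 |  |     // above scales the index by 4, and the SXTW below sign-extends the loaded offset | 
 |  |     // before it is added to the table base. | 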
 | 5264 |  | 
 | 5265 |     // Jump to the target block by branching to table_base (PC-relative) + offset. | 
 | 5266 |     Register target_address = table_base; | 
 | 5267 |     __ Add(target_address, table_base, Operand(jump_offset, SXTW)); | 
 | 5268 |     __ Br(target_address); | 
| Mark Mendell | fe57faa | 2015-09-18 09:26:15 -0400 | [diff] [blame] | 5269 |   } | 
 | 5270 | } | 
 | 5271 |  | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5272 | void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction, | 
 | 5273 |                                                                      Location out, | 
 | 5274 |                                                                      uint32_t offset, | 
 | 5275 |                                                                      Location maybe_temp) { | 
 | 5276 |   Primitive::Type type = Primitive::kPrimNot; | 
 | 5277 |   Register out_reg = RegisterFrom(out, type); | 
 | 5278 |   if (kEmitCompilerReadBarrier) { | 
 | 5279 |     Register temp_reg = RegisterFrom(maybe_temp, type); | 
 | 5280 |     if (kUseBakerReadBarrier) { | 
 | 5281 |       // Load with fast path based Baker's read barrier. | 
 | 5282 |       // /* HeapReference<Object> */ out = *(out + offset) | 
 | 5283 |       codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction, | 
 | 5284 |                                                       out, | 
 | 5285 |                                                       out_reg, | 
 | 5286 |                                                       offset, | 
 | 5287 |                                                       temp_reg, | 
 | 5288 |                                                       /* needs_null_check */ false, | 
 | 5289 |                                                       /* use_load_acquire */ false); | 
 | 5290 |     } else { | 
 | 5291 |       // Load with slow path based read barrier. | 
 | 5292 |       // Save the value of `out` into `maybe_temp` before overwriting it | 
 | 5293 |       // in the following move operation, as we will need it for the | 
 | 5294 |       // read barrier below. | 
 | 5295 |       __ Mov(temp_reg, out_reg); | 
 | 5296 |       // /* HeapReference<Object> */ out = *(out + offset) | 
 | 5297 |       __ Ldr(out_reg, HeapOperand(out_reg, offset)); | 
 | 5298 |       codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset); | 
 | 5299 |     } | 
 | 5300 |   } else { | 
 | 5301 |     // Plain load with no read barrier. | 
 | 5302 |     // /* HeapReference<Object> */ out = *(out + offset) | 
 | 5303 |     __ Ldr(out_reg, HeapOperand(out_reg, offset)); | 
 | 5304 |     GetAssembler()->MaybeUnpoisonHeapReference(out_reg); | 
 | 5305 |   } | 
 | 5306 | } | 
 | 5307 |  | 
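 |  | // Unlike GenerateReferenceLoadOneRegister above, the base object register is distinct | 
 |  | // from `out`, so the non-Baker slow path below does not need to save the original | 
 |  | // reference into a temporary before performing the load. | 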
 | 5308 | void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction, | 
 | 5309 |                                                                       Location out, | 
 | 5310 |                                                                       Location obj, | 
 | 5311 |                                                                       uint32_t offset, | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 5312 |                                                                       Location maybe_temp, | 
 | 5313 |                                                                       bool emit_read_barrier) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5314 |   Primitive::Type type = Primitive::kPrimNot; | 
 | 5315 |   Register out_reg = RegisterFrom(out, type); | 
 | 5316 |   Register obj_reg = RegisterFrom(obj, type); | 
| Mathieu Chartier | 5c44c1b | 2016-11-04 18:13:04 -0700 | [diff] [blame^] | 5317 |   if (emit_read_barrier) { | 
 | 5318 |     DCHECK(kEmitCompilerReadBarrier); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5319 |     if (kUseBakerReadBarrier) { | 
 | 5320 |       // Load with fast path based Baker's read barrier. | 
 | 5321 |       Register temp_reg = RegisterFrom(maybe_temp, type); | 
 | 5322 |       // /* HeapReference<Object> */ out = *(obj + offset) | 
 | 5323 |       codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction, | 
 | 5324 |                                                       out, | 
 | 5325 |                                                       obj_reg, | 
 | 5326 |                                                       offset, | 
 | 5327 |                                                       temp_reg, | 
 | 5328 |                                                       /* needs_null_check */ false, | 
 | 5329 |                                                       /* use_load_acquire */ false); | 
 | 5330 |     } else { | 
 | 5331 |       // Load with slow path based read barrier. | 
 | 5332 |       // /* HeapReference<Object> */ out = *(obj + offset) | 
 | 5333 |       __ Ldr(out_reg, HeapOperand(obj_reg, offset)); | 
 | 5334 |       codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset); | 
 | 5335 |     } | 
 | 5336 |   } else { | 
 | 5337 |     // Plain load with no read barrier. | 
 | 5338 |     // /* HeapReference<Object> */ out = *(obj + offset) | 
 | 5339 |     __ Ldr(out_reg, HeapOperand(obj_reg, offset)); | 
 | 5340 |     GetAssembler()->MaybeUnpoisonHeapReference(out_reg); | 
 | 5341 |   } | 
 | 5342 | } | 
 | 5343 |  | 
 | 5344 | void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction, | 
 | 5345 |                                                             Location root, | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 5346 |                                                             Register obj, | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 5347 |                                                             uint32_t offset, | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 5348 |                                                             vixl::aarch64::Label* fixup_label, | 
 | 5349 |                                                             bool requires_read_barrier) { | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 5350 |   DCHECK(fixup_label == nullptr || offset == 0u); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5351 |   Register root_reg = RegisterFrom(root, Primitive::kPrimNot); | 
| Mathieu Chartier | 31b12e3 | 2016-09-02 17:11:57 -0700 | [diff] [blame] | 5352 |   if (requires_read_barrier) { | 
 | 5353 |     DCHECK(kEmitCompilerReadBarrier); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5354 |     if (kUseBakerReadBarrier) { | 
 | 5355 |       // Fast path implementation of art::ReadBarrier::BarrierForRoot when | 
 | 5356 |       // Baker's read barriers are used: | 
 | 5357 |       // | 
 | 5358 |       //   root = obj.field; | 
 | 5359 |       //   if (Thread::Current()->GetIsGcMarking()) { | 
 | 5360 |       //     root = ReadBarrier::Mark(root) | 
 | 5361 |       //   } | 
 | 5362 |  | 
 | 5363 |       // /* GcRoot<mirror::Object> */ root = *(obj + offset) | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 5364 |       if (fixup_label == nullptr) { | 
 | 5365 |         __ Ldr(root_reg, MemOperand(obj, offset)); | 
 | 5366 |       } else { | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 5367 |         codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 5368 |       } | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5369 |       static_assert( | 
 | 5370 |           sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>), | 
 | 5371 |           "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> " | 
 | 5372 |           "have different sizes."); | 
 | 5373 |       static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t), | 
 | 5374 |                     "art::mirror::CompressedReference<mirror::Object> and int32_t " | 
 | 5375 |                     "have different sizes."); | 
 | 5376 |  | 
| Vladimir Marko | 953437b | 2016-08-24 08:30:46 +0000 | [diff] [blame] | 5377 |       // Slow path marking the GC root `root`. | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5378 |       SlowPathCodeARM64* slow_path = | 
| Roland Levillain | 02b7580 | 2016-07-13 11:54:35 +0100 | [diff] [blame] | 5379 |           new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5380 |       codegen_->AddSlowPath(slow_path); | 
 | 5381 |  | 
 | 5382 |       MacroAssembler* masm = GetVIXLAssembler(); | 
 | 5383 |       UseScratchRegisterScope temps(masm); | 
 | 5384 |       Register temp = temps.AcquireW(); | 
 | 5385 |       // temp = Thread::Current()->GetIsGcMarking() | 
| Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 5386 |       __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64PointerSize>().Int32Value())); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5387 |       __ Cbnz(temp, slow_path->GetEntryLabel()); | 
 | 5388 |       __ Bind(slow_path->GetExitLabel()); | 
 | 5389 |     } else { | 
 | 5390 |       // GC root loaded through a slow path for read barriers other | 
 | 5391 |       // than Baker's. | 
 | 5392 |       // /* GcRoot<mirror::Object>* */ root = obj + offset | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 5393 |       if (fixup_label == nullptr) { | 
 | 5394 |         __ Add(root_reg.X(), obj.X(), offset); | 
 | 5395 |       } else { | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 5396 |         codegen_->EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X()); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 5397 |       } | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5398 |       // /* mirror::Object* */ root = root->Read() | 
 | 5399 |       codegen_->GenerateReadBarrierForRootSlow(instruction, root, root); | 
 | 5400 |     } | 
 | 5401 |   } else { | 
 | 5402 |     // Plain GC root load with no read barrier. | 
 | 5403 |     // /* GcRoot<mirror::Object> */ root = *(obj + offset) | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 5404 |     if (fixup_label == nullptr) { | 
 | 5405 |       __ Ldr(root_reg, MemOperand(obj, offset)); | 
 | 5406 |     } else { | 
| Vladimir Marko | aad75c6 | 2016-10-03 08:46:48 +0000 | [diff] [blame] | 5407 |       codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X()); | 
| Vladimir Marko | cac5a7e | 2016-02-22 10:39:50 +0000 | [diff] [blame] | 5408 |     } | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5409 |     // Note that GC roots are not affected by heap poisoning, thus we | 
 | 5410 |     // do not have to unpoison `root_reg` here. | 
 | 5411 |   } | 
 | 5412 | } | 
 | 5413 |  | 
 | 5414 | void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction, | 
 | 5415 |                                                                Location ref, | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 5416 |                                                                Register obj, | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5417 |                                                                uint32_t offset, | 
 | 5418 |                                                                Register temp, | 
 | 5419 |                                                                bool needs_null_check, | 
 | 5420 |                                                                bool use_load_acquire) { | 
 | 5421 |   DCHECK(kEmitCompilerReadBarrier); | 
 | 5422 |   DCHECK(kUseBakerReadBarrier); | 
 | 5423 |  | 
 | 5424 |   // /* HeapReference<Object> */ ref = *(obj + offset) | 
 | 5425 |   Location no_index = Location::NoLocation(); | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 5426 |   size_t no_scale_factor = 0u; | 
| Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 5427 |   GenerateReferenceLoadWithBakerReadBarrier(instruction, | 
 | 5428 |                                             ref, | 
 | 5429 |                                             obj, | 
 | 5430 |                                             offset, | 
 | 5431 |                                             no_index, | 
 | 5432 |                                             no_scale_factor, | 
 | 5433 |                                             temp, | 
 | 5434 |                                             needs_null_check, | 
 | 5435 |                                             use_load_acquire); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5436 | } | 
 | 5437 |  | 
 | 5438 | void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction, | 
 | 5439 |                                                                Location ref, | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 5440 |                                                                Register obj, | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5441 |                                                                uint32_t data_offset, | 
 | 5442 |                                                                Location index, | 
 | 5443 |                                                                Register temp, | 
 | 5444 |                                                                bool needs_null_check) { | 
 | 5445 |   DCHECK(kEmitCompilerReadBarrier); | 
 | 5446 |   DCHECK(kUseBakerReadBarrier); | 
 | 5447 |  | 
 | 5448 |   // Array cells are never volatile variables, therefore array loads | 
 | 5449 |   // never use Load-Acquire instructions on ARM64. | 
 | 5450 |   const bool use_load_acquire = false; | 
 | 5451 |  | 
| Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 5452 |   static_assert( | 
 | 5453 |       sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t), | 
 | 5454 |       "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes."); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5455 |   // /* HeapReference<Object> */ ref = | 
 | 5456 |   //     *(obj + data_offset + index * sizeof(HeapReference<Object>)) | 
| Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 5457 |   size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot); | 
 | 5458 |   GenerateReferenceLoadWithBakerReadBarrier(instruction, | 
 | 5459 |                                             ref, | 
 | 5460 |                                             obj, | 
 | 5461 |                                             data_offset, | 
 | 5462 |                                             index, | 
 | 5463 |                                             scale_factor, | 
 | 5464 |                                             temp, | 
 | 5465 |                                             needs_null_check, | 
 | 5466 |                                             use_load_acquire); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5467 | } | 
 | 5468 |  | 
 | 5469 | void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction, | 
 | 5470 |                                                                    Location ref, | 
| Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 5471 |                                                                    Register obj, | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5472 |                                                                    uint32_t offset, | 
 | 5473 |                                                                    Location index, | 
| Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 5474 |                                                                    size_t scale_factor, | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5475 |                                                                    Register temp, | 
 | 5476 |                                                                    bool needs_null_check, | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 5477 |                                                                    bool use_load_acquire, | 
 | 5478 |                                                                    bool always_update_field) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5479 |   DCHECK(kEmitCompilerReadBarrier); | 
 | 5480 |   DCHECK(kUseBakerReadBarrier); | 
| Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 5481 |   // If we are emitting an array load, we should not be using a | 
 | 5482 |   // Load Acquire instruction.  In other words: | 
 | 5483 |   // `instruction->IsArrayGet()` => `!use_load_acquire`. | 
 | 5484 |   DCHECK(!instruction->IsArrayGet() || !use_load_acquire); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5485 |  | 
 | 5486 |   MacroAssembler* masm = GetVIXLAssembler(); | 
 | 5487 |   UseScratchRegisterScope temps(masm); | 
 | 5488 |  | 
 | 5489 |   // In slow path based read barriers, the read barrier call is | 
 | 5490 |   // inserted after the original load. However, in fast path based | 
 | 5491 |   // Baker's read barriers, we need to perform the load of | 
 | 5492 |   // mirror::Object::monitor_ *before* the original reference load. | 
 | 5493 |   // This load-load ordering is required by the read barrier. | 
 | 5494 |   // The fast path/slow path (for Baker's algorithm) should look like: | 
 | 5495 |   // | 
 | 5496 |   //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState(); | 
 | 5497 |   //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering | 
 | 5498 |   //   HeapReference<Object> ref = *src;  // Original reference load. | 
| Hiroshi Yamauchi | 12b58b2 | 2016-11-01 11:55:29 -0700 | [diff] [blame] | 5499 |   //   bool is_gray = (rb_state == ReadBarrier::GrayState()); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5500 |   //   if (is_gray) { | 
 | 5501 |   //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path. | 
 | 5502 |   //   } | 
 | 5503 |   // | 
 | 5504 |   // Note: the original implementation in ReadBarrier::Barrier is | 
 | 5505 |   // slightly more complex as it performs additional checks that we do | 
 | 5506 |   // not do here for performance reasons. | 
 | 5507 |  | 
 | 5508 |   Primitive::Type type = Primitive::kPrimNot; | 
 | 5509 |   Register ref_reg = RegisterFrom(ref, type); | 
 | 5510 |   DCHECK(obj.IsW()); | 
 | 5511 |   uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value(); | 
 | 5512 |  | 
 | 5513 |   // /* int32_t */ monitor = obj->monitor_ | 
 | 5514 |   __ Ldr(temp, HeapOperand(obj, monitor_offset)); | 
 | 5515 |   if (needs_null_check) { | 
 | 5516 |     MaybeRecordImplicitNullCheck(instruction); | 
 | 5517 |   } | 
 | 5518 |   // /* LockWord */ lock_word = LockWord(monitor) | 
 | 5519 |   static_assert(sizeof(LockWord) == sizeof(int32_t), | 
 | 5520 |                 "art::LockWord and int32_t have different sizes."); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5521 |  | 
| Vladimir Marko | 877a033 | 2016-07-11 19:30:56 +0100 | [diff] [blame] | 5522 |   // Introduce a dependency on the lock_word (which includes the rb_state) | 
 | 5523 |   // to prevent load-load reordering, without using | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5524 |   // a memory barrier (which would be more expensive). | 
| Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 5525 |   // `obj` is unchanged by this operation, but its value now depends | 
 | 5526 |   // on `temp`. | 
| Vladimir Marko | 877a033 | 2016-07-11 19:30:56 +0100 | [diff] [blame] | 5527 |   __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32)); | 
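 |  |   // The 32-bit load into `temp` above zero-extends into temp.X(), so | 
 |  |   // (temp.X() LSR 32) is always zero: the Add is effectively a no-op whose only | 
 |  |   // purpose is to establish the data dependency. | 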
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5528 |  | 
 | 5529 |   // The actual reference load. | 
 | 5530 |   if (index.IsValid()) { | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 5531 |     // Load types involving an "index": ArrayGet, | 
 | 5532 |     // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject | 
 | 5533 |     // intrinsics. | 
| Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 5534 |     if (use_load_acquire) { | 
 | 5535 |       // UnsafeGetObjectVolatile intrinsic case. | 
 | 5536 |       // Register `index` is not an index in an object array, but an | 
 | 5537 |       // offset to an object reference field within object `obj`. | 
 | 5538 |       DCHECK(instruction->IsInvoke()) << instruction->DebugName(); | 
 | 5539 |       DCHECK(instruction->GetLocations()->Intrinsified()); | 
 | 5540 |       DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile) | 
 | 5541 |           << instruction->AsInvoke()->GetIntrinsic(); | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 5542 |       DCHECK_EQ(offset, 0u); | 
 | 5543 |       DCHECK_EQ(scale_factor, 0u); | 
 | 5544 |       DCHECK(!needs_null_check); | 
| Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 5545 |       // /* HeapReference<Object> */ ref = *(obj + index) | 
 | 5546 |       MemOperand field = HeapOperand(obj, XRegisterFrom(index)); | 
 | 5547 |       LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5548 |     } else { | 
| Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 5549 |       // ArrayGet and UnsafeGetObject intrinsics cases. | 
 | 5550 |       // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor)) | 
 | 5551 |       if (index.IsConstant()) { | 
 | 5552 |         uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor); | 
 | 5553 |         Load(type, ref_reg, HeapOperand(obj, computed_offset)); | 
 | 5554 |       } else { | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 5555 |         Register temp3 = temps.AcquireW(); | 
 | 5556 |         __ Add(temp3, obj, offset); | 
 | 5557 |         Load(type, ref_reg, HeapOperand(temp3, XRegisterFrom(index), LSL, scale_factor)); | 
 | 5558 |         temps.Release(temp3); | 
| Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 5559 |       } | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5560 |     } | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5561 |   } else { | 
 | 5562 |     // /* HeapReference<Object> */ ref = *(obj + offset) | 
 | 5563 |     MemOperand field = HeapOperand(obj, offset); | 
 | 5564 |     if (use_load_acquire) { | 
 | 5565 |       LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false); | 
 | 5566 |     } else { | 
 | 5567 |       Load(type, ref_reg, field); | 
 | 5568 |     } | 
 | 5569 |   } | 
 | 5570 |  | 
 | 5571 |   // Object* ref = ref_addr->AsMirrorPtr() | 
 | 5572 |   GetAssembler()->MaybeUnpoisonHeapReference(ref_reg); | 
 | 5573 |  | 
| Vladimir Marko | 953437b | 2016-08-24 08:30:46 +0000 | [diff] [blame] | 5574 |   // Slow path marking the object `ref` when it is gray. | 
| Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 5575 |   SlowPathCodeARM64* slow_path; | 
 | 5576 |   if (always_update_field) { | 
 | 5577 |     // ReadBarrierMarkAndUpdateFieldSlowPathARM64 only supports addresses | 
 | 5578 |     // of the form `obj + field_offset`, where both `obj` and `field_offset` | 
 | 5579 |     // are registers. Thus `offset` and `scale_factor` above are expected | 
 | 5580 |     // to be zero in this code path. | 
 | 5581 |     DCHECK_EQ(offset, 0u); | 
 | 5582 |     DCHECK_EQ(scale_factor, 0u);  /* "times 1" */ | 
 | 5583 |     slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathARM64( | 
 | 5584 |         instruction, ref, obj, /* field_offset */ index, temp); | 
 | 5585 |   } else { | 
 | 5586 |     slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref); | 
 | 5587 |   } | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5588 |   AddSlowPath(slow_path); | 
 | 5589 |  | 
| Hiroshi Yamauchi | 12b58b2 | 2016-11-01 11:55:29 -0700 | [diff] [blame] | 5590 |   // if (rb_state == ReadBarrier::GrayState()) | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5591 |   //   ref = ReadBarrier::Mark(ref); | 
| Vladimir Marko | 877a033 | 2016-07-11 19:30:56 +0100 | [diff] [blame] | 5592 |   // Given the numeric representation, it's enough to check the low bit of the rb_state. | 
| Hiroshi Yamauchi | 12b58b2 | 2016-11-01 11:55:29 -0700 | [diff] [blame] | 5593 |   static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0"); | 
 | 5594 |   static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1"); | 
| Vladimir Marko | 877a033 | 2016-07-11 19:30:56 +0100 | [diff] [blame] | 5595 |   __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel()); | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5596 |   __ Bind(slow_path->GetExitLabel()); | 
 | 5597 | } | 
 | 5598 |  | 
 | 5599 | void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction, | 
 | 5600 |                                                  Location out, | 
 | 5601 |                                                  Location ref, | 
 | 5602 |                                                  Location obj, | 
 | 5603 |                                                  uint32_t offset, | 
 | 5604 |                                                  Location index) { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 5605 |   DCHECK(kEmitCompilerReadBarrier); | 
 | 5606 |  | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5607 |   // Insert a slow path based read barrier *after* the reference load. | 
 | 5608 |   // | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 5609 |   // If heap poisoning is enabled, the unpoisoning of the loaded | 
 | 5610 |   // reference will be carried out by the runtime within the slow | 
 | 5611 |   // path. | 
 | 5612 |   // | 
 | 5613 |   // Note that `ref` currently does not get unpoisoned (when heap | 
 | 5614 |   // poisoning is enabled), which is alright as the `ref` argument is | 
 | 5615 |   // not used by the artReadBarrierSlow entry point. | 
 | 5616 |   // | 
 | 5617 |   // TODO: Unpoison `ref` when it is used by artReadBarrierSlow. | 
 | 5618 |   SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) | 
 | 5619 |       ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index); | 
 | 5620 |   AddSlowPath(slow_path); | 
 | 5621 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 5622 |   __ B(slow_path->GetEntryLabel()); | 
 | 5623 |   __ Bind(slow_path->GetExitLabel()); | 
 | 5624 | } | 
 | 5625 |  | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5626 | void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction, | 
 | 5627 |                                                       Location out, | 
 | 5628 |                                                       Location ref, | 
 | 5629 |                                                       Location obj, | 
 | 5630 |                                                       uint32_t offset, | 
 | 5631 |                                                       Location index) { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 5632 |   if (kEmitCompilerReadBarrier) { | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5633 |     // Baker's read barriers shall be handled by the fast path | 
 | 5634 |     // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier). | 
 | 5635 |     DCHECK(!kUseBakerReadBarrier); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 5636 |     // If heap poisoning is enabled, unpoisoning will be taken care of | 
 | 5637 |     // by the runtime within the slow path. | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5638 |     GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index); | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 5639 |   } else if (kPoisonHeapReferences) { | 
 | 5640 |     GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out)); | 
 | 5641 |   } | 
 | 5642 | } | 
 | 5643 |  | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5644 | void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction, | 
 | 5645 |                                                         Location out, | 
 | 5646 |                                                         Location root) { | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 5647 |   DCHECK(kEmitCompilerReadBarrier); | 
 | 5648 |  | 
| Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 5649 |   // Insert a slow path based read barrier *after* the GC root load. | 
 | 5650 |   // | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 5651 |   // Note that GC roots are not affected by heap poisoning, so we do | 
 | 5652 |   // not need to do anything special for this here. | 
 | 5653 |   SlowPathCodeARM64* slow_path = | 
 | 5654 |       new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root); | 
 | 5655 |   AddSlowPath(slow_path); | 
 | 5656 |  | 
| Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 5657 |   __ B(slow_path->GetEntryLabel()); | 
 | 5658 |   __ Bind(slow_path->GetExitLabel()); | 
 | 5659 | } | 
 | 5660 |  | 
| Nicolas Geoffray | a42363f | 2015-12-17 14:57:09 +0000 | [diff] [blame] | 5661 | void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) { | 
 | 5662 |   LocationSummary* locations = | 
 | 5663 |       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); | 
 | 5664 |   locations->SetInAt(0, Location::RequiresRegister()); | 
 | 5665 |   locations->SetOut(Location::RequiresRegister()); | 
 | 5666 | } | 
 | 5667 |  | 
 | 5668 | void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) { | 
 | 5669 |   LocationSummary* locations = instruction->GetLocations(); | 
| Vladimir Marko | a1de918 | 2016-02-25 11:37:38 +0000 | [diff] [blame] | 5670 |   if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) { | 
| Nicolas Geoffray | ff484b9 | 2016-07-13 14:13:48 +0100 | [diff] [blame] | 5671 |     uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset( | 
| Nicolas Geoffray | a42363f | 2015-12-17 14:57:09 +0000 | [diff] [blame] | 5672 |         instruction->GetIndex(), kArm64PointerSize).SizeValue(); | 
| Nicolas Geoffray | ff484b9 | 2016-07-13 14:13:48 +0100 | [diff] [blame] | 5673 |     __ Ldr(XRegisterFrom(locations->Out()), | 
 | 5674 |            MemOperand(XRegisterFrom(locations->InAt(0)), method_offset)); | 
| Nicolas Geoffray | a42363f | 2015-12-17 14:57:09 +0000 | [diff] [blame] | 5675 |   } else { | 
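 |  |     // IMT case: load the ImTable pointer from the class, then the method entry at | 
 |  |     // the computed offset within that table. | 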
| Nicolas Geoffray | ff484b9 | 2016-07-13 14:13:48 +0100 | [diff] [blame] | 5676 |     uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement( | 
| Matthew Gharrity | 465ecc8 | 2016-07-19 21:32:52 +0000 | [diff] [blame] | 5677 |         instruction->GetIndex(), kArm64PointerSize)); | 
| Artem Udovichenko | a62cb9b | 2016-06-30 09:18:25 +0000 | [diff] [blame] | 5678 |     __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)), | 
 | 5679 |         mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value())); | 
| Nicolas Geoffray | ff484b9 | 2016-07-13 14:13:48 +0100 | [diff] [blame] | 5680 |     __ Ldr(XRegisterFrom(locations->Out()), | 
 | 5681 |            MemOperand(XRegisterFrom(locations->Out()), method_offset)); | 
| Nicolas Geoffray | a42363f | 2015-12-17 14:57:09 +0000 | [diff] [blame] | 5682 |   } | 
| Nicolas Geoffray | a42363f | 2015-12-17 14:57:09 +0000 | [diff] [blame] | 5683 | } | 
 | 5684 |  | 
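 |  | // Patch each 32-bit string-root literal in the emitted code with the absolute address | 
 |  | // of its slot in the JIT roots table, i.e. | 
 |  | // roots_data + index_in_table * sizeof(GcRoot<mirror::Object>). | 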
| Nicolas Geoffray | 997d121 | 2016-11-09 10:36:29 +0000 | [diff] [blame] | 5685 | void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) { | 
 | 5686 |   for (const auto& entry : jit_string_patches_) { | 
 | 5687 |     const auto& it = jit_string_roots_.find(entry.first); | 
 | 5688 |     DCHECK(it != jit_string_roots_.end()); | 
 | 5689 |     size_t index_in_table = it->second; | 
 | 5690 |     vixl::aarch64::Literal<uint32_t>* literal = entry.second; | 
 | 5691 |     uint32_t literal_offset = literal->GetOffset(); | 
 | 5692 |     uintptr_t address = | 
 | 5693 |         reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>); | 
 | 5694 |     uint8_t* data = code + literal_offset; | 
 | 5695 |     reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address); | 
 | 5696 |   } | 
 | 5697 | } | 
| Nicolas Geoffray | a42363f | 2015-12-17 14:57:09 +0000 | [diff] [blame] | 5698 |  | 
| Alexandre Rames | 67555f7 | 2014-11-18 10:55:16 +0000 | [diff] [blame] | 5699 | #undef __ | 
 | 5700 | #undef QUICK_ENTRY_POINT | 
 | 5701 |  | 
| Alexandre Rames | 5319def | 2014-10-23 10:03:10 +0100 | [diff] [blame] | 5702 | }  // namespace arm64 | 
 | 5703 | }  // namespace art |