Nicolas Geoffray | 26a25ef | 2014-09-30 13:54:09 +0100 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2014 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "prepare_for_register_allocation.h" |
| 18 | |
David Sehr | 9e734c7 | 2018-01-04 17:56:19 -0800 | [diff] [blame] | 19 | #include "dex/dex_file_types.h" |
Nicolas Geoffray | 61ba8d2 | 2018-08-07 09:55:57 +0100 | [diff] [blame] | 20 | #include "driver/compiler_options.h" |
Vladimir Marko | a3ad0cd | 2018-05-04 10:06:38 +0100 | [diff] [blame] | 21 | #include "jni/jni_internal.h" |
Igor Murashkin | 6ef4567 | 2017-08-08 13:59:55 -0700 | [diff] [blame] | 22 | #include "optimizing_compiler_stats.h" |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 23 | #include "well_known_classes.h" |
| 24 | |
Nicolas Geoffray | 26a25ef | 2014-09-30 13:54:09 +0100 | [diff] [blame] | 25 | namespace art { |
| 26 | |
| 27 | void PrepareForRegisterAllocation::Run() { |
| 28 | // Order does not matter. |
Vladimir Marko | 2c45bc9 | 2016-10-25 16:54:12 +0100 | [diff] [blame] | 29 | for (HBasicBlock* block : GetGraph()->GetReversePostOrder()) { |
Nicolas Geoffray | 26a25ef | 2014-09-30 13:54:09 +0100 | [diff] [blame] | 30 | // No need to visit the phis. |
Nicolas Geoffray | 61ba8d2 | 2018-08-07 09:55:57 +0100 | [diff] [blame] | 31 | for (HInstructionIteratorHandleChanges inst_it(block->GetInstructions()); !inst_it.Done(); |
Andreas Gampe | 277ccbd | 2014-11-03 21:36:10 -0800 | [diff] [blame] | 32 | inst_it.Advance()) { |
| 33 | inst_it.Current()->Accept(this); |
Nicolas Geoffray | 26a25ef | 2014-09-30 13:54:09 +0100 | [diff] [blame] | 34 | } |
| 35 | } |
| 36 | } |
| 37 | |
Vladimir Marko | 175e786 | 2018-03-27 09:03:13 +0000 | [diff] [blame] | 38 | void PrepareForRegisterAllocation::VisitCheckCast(HCheckCast* check_cast) { |
| 39 | // Record only those bitstring type checks that make it to the codegen stage. |
| 40 | if (check_cast->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) { |
| 41 | MaybeRecordStat(stats_, MethodCompilationStat::kBitstringTypeCheck); |
| 42 | } |
| 43 | } |
| 44 | |
| 45 | void PrepareForRegisterAllocation::VisitInstanceOf(HInstanceOf* instance_of) { |
| 46 | // Record only those bitstring type checks that make it to the codegen stage. |
| 47 | if (instance_of->GetTypeCheckKind() == TypeCheckKind::kBitstringCheck) { |
| 48 | MaybeRecordStat(stats_, MethodCompilationStat::kBitstringTypeCheck); |
| 49 | } |
| 50 | } |
| 51 | |
Nicolas Geoffray | 26a25ef | 2014-09-30 13:54:09 +0100 | [diff] [blame] | 52 | void PrepareForRegisterAllocation::VisitNullCheck(HNullCheck* check) { |
| 53 | check->ReplaceWith(check->InputAt(0)); |
Nicolas Geoffray | 61ba8d2 | 2018-08-07 09:55:57 +0100 | [diff] [blame] | 54 | if (compiler_options_.GetImplicitNullChecks()) { |
| 55 | HInstruction* next = check->GetNext(); |
| 56 | |
| 57 | // The `PrepareForRegisterAllocation` pass removes `HBoundType` from the graph, |
| 58 | // so do it ourselves now to not prevent optimizations. |
| 59 | while (next->IsBoundType()) { |
| 60 | next = next->GetNext(); |
| 61 | VisitBoundType(next->GetPrevious()->AsBoundType()); |
| 62 | } |
| 63 | if (next->CanDoImplicitNullCheckOn(check->InputAt(0))) { |
| 64 | check->MarkEmittedAtUseSite(); |
| 65 | } |
| 66 | } |
Nicolas Geoffray | 26a25ef | 2014-09-30 13:54:09 +0100 | [diff] [blame] | 67 | } |
| 68 | |
Calin Juravle | d0d4852 | 2014-11-04 16:40:20 +0000 | [diff] [blame] | 69 | void PrepareForRegisterAllocation::VisitDivZeroCheck(HDivZeroCheck* check) { |
| 70 | check->ReplaceWith(check->InputAt(0)); |
| 71 | } |
| 72 | |
Nicolas Geoffray | 6f8e2c9 | 2017-03-23 14:37:26 +0000 | [diff] [blame] | 73 | void PrepareForRegisterAllocation::VisitDeoptimize(HDeoptimize* deoptimize) { |
| 74 | if (deoptimize->GuardsAnInput()) { |
| 75 | // Replace the uses with the actual guarded instruction. |
| 76 | deoptimize->ReplaceWith(deoptimize->GuardedInput()); |
| 77 | deoptimize->RemoveGuard(); |
| 78 | } |
| 79 | } |
| 80 | |
Nicolas Geoffray | 26a25ef | 2014-09-30 13:54:09 +0100 | [diff] [blame] | 81 | void PrepareForRegisterAllocation::VisitBoundsCheck(HBoundsCheck* check) { |
| 82 | check->ReplaceWith(check->InputAt(0)); |
Vladimir Marko | 87f3fcb | 2016-04-28 15:52:11 +0100 | [diff] [blame] | 83 | if (check->IsStringCharAt()) { |
Vladimir Marko | 92f7f3c | 2017-10-31 11:38:30 +0000 | [diff] [blame] | 84 | // Add a fake environment for String.charAt() inline info as we want the exception |
| 85 | // to appear as being thrown from there. Skip if we're compiling String.charAt() itself. |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 86 | ArtMethod* char_at_method = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt); |
Vladimir Marko | 92f7f3c | 2017-10-31 11:38:30 +0000 | [diff] [blame] | 87 | if (GetGraph()->GetArtMethod() != char_at_method) { |
| 88 | ArenaAllocator* allocator = GetGraph()->GetAllocator(); |
| 89 | HEnvironment* environment = new (allocator) HEnvironment(allocator, |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 90 | /* number_of_vregs= */ 0u, |
Vladimir Marko | 92f7f3c | 2017-10-31 11:38:30 +0000 | [diff] [blame] | 91 | char_at_method, |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 92 | /* dex_pc= */ dex::kDexNoIndex, |
Vladimir Marko | 92f7f3c | 2017-10-31 11:38:30 +0000 | [diff] [blame] | 93 | check); |
| 94 | check->InsertRawEnvironment(environment); |
| 95 | } |
Vladimir Marko | 87f3fcb | 2016-04-28 15:52:11 +0100 | [diff] [blame] | 96 | } |
Nicolas Geoffray | 26a25ef | 2014-09-30 13:54:09 +0100 | [diff] [blame] | 97 | } |
| 98 | |
Calin Juravle | b1498f6 | 2015-02-16 13:13:29 +0000 | [diff] [blame] | 99 | void PrepareForRegisterAllocation::VisitBoundType(HBoundType* bound_type) { |
| 100 | bound_type->ReplaceWith(bound_type->InputAt(0)); |
| 101 | bound_type->GetBlock()->RemoveInstruction(bound_type); |
| 102 | } |
| 103 | |
Roland Levillain | b133ec6 | 2016-03-23 12:40:35 +0000 | [diff] [blame] | 104 | void PrepareForRegisterAllocation::VisitArraySet(HArraySet* instruction) { |
| 105 | HInstruction* value = instruction->GetValue(); |
| 106 | // PrepareForRegisterAllocation::VisitBoundType may have replaced a |
| 107 | // BoundType (as value input of this ArraySet) with a NullConstant. |
| 108 | // If so, this ArraySet no longer needs a type check. |
| 109 | if (value->IsNullConstant()) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 110 | DCHECK_EQ(value->GetType(), DataType::Type::kReference); |
Roland Levillain | b133ec6 | 2016-03-23 12:40:35 +0000 | [diff] [blame] | 111 | if (instruction->NeedsTypeCheck()) { |
| 112 | instruction->ClearNeedsTypeCheck(); |
| 113 | } |
| 114 | } |
| 115 | } |
| 116 | |
// Merge an explicit HClinitCheck into a user that can perform the class
// initialization implicitly (a static invoke or a new-instance from the same
// dex instruction), or failing that into the HLoadClass that feeds it.
// Either way, all uses of the check are redirected to the load class and the
// check itself is removed from the graph when a merge target was found.
void PrepareForRegisterAllocation::VisitClinitCheck(HClinitCheck* check) {
  // Try to find a static invoke or a new-instance from which this check originated.
  HInstruction* implicit_clinit = nullptr;
  for (const HUseListNode<HInstruction*>& use : check->GetUses()) {
    HInstruction* user = use.GetUser();
    if ((user->IsInvokeStaticOrDirect() || user->IsNewInstance()) &&
        CanMoveClinitCheck(check, user)) {
      implicit_clinit = user;
      if (user->IsInvokeStaticOrDirect()) {
        DCHECK(user->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck());
        // The invoke will now initialize the class as a side effect of the call.
        user->AsInvokeStaticOrDirect()->RemoveExplicitClinitCheck(
            HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
      } else {
        DCHECK(user->IsNewInstance());
        // We delegate the initialization duty to the allocation.
        if (user->AsNewInstance()->GetEntrypoint() == kQuickAllocObjectInitialized) {
          user->AsNewInstance()->SetEntrypoint(kQuickAllocObjectResolved);
        }
      }
      // One implicit target is enough; stop scanning the use list.
      break;
    }
  }
  // If we found a static invoke or new-instance for merging, remove the check
  // from dominated static invokes.
  if (implicit_clinit != nullptr) {
    const HUseList<HInstruction*>& uses = check->GetUses();
    for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
      HInstruction* user = it->GetUser();
      // All other uses must be dominated.
      DCHECK(implicit_clinit->StrictlyDominates(user) || (implicit_clinit == user));
      ++it;  // Advance before we remove the node, reference to the next node is preserved.
      if (user->IsInvokeStaticOrDirect()) {
        // RemoveExplicitClinitCheck() drops the use we just stepped past.
        user->AsInvokeStaticOrDirect()->RemoveExplicitClinitCheck(
            HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);
      }
    }
  }

  HLoadClass* load_class = check->GetLoadClass();
  // Test mergeability with the load class BEFORE rewiring uses below.
  bool can_merge_with_load_class = CanMoveClinitCheck(load_class, check);

  // Remaining users of the check now consume the load class directly.
  check->ReplaceWith(load_class);

  if (implicit_clinit != nullptr) {
    // Remove the check from the graph. It has been merged into the invoke or new-instance.
    check->GetBlock()->RemoveInstruction(check);
    // Check if we can merge the load class as well.
    if (can_merge_with_load_class && !load_class->HasUses()) {
      load_class->GetBlock()->RemoveInstruction(load_class);
    }
  } else if (can_merge_with_load_class &&
             load_class->GetLoadKind() != HLoadClass::LoadKind::kRuntimeCall) {
    DCHECK(!load_class->NeedsAccessCheck());
    // Pass the initialization duty to the `HLoadClass` instruction,
    // and remove the instruction from the graph.
    DCHECK(load_class->HasEnvironment());
    load_class->SetMustGenerateClinitCheck(true);
    check->GetBlock()->RemoveInstruction(check);
  }
}
| 177 | |
David Brazdil | b3e773e | 2016-01-26 11:28:37 +0000 | [diff] [blame] | 178 | bool PrepareForRegisterAllocation::CanEmitConditionAt(HCondition* condition, |
| 179 | HInstruction* user) const { |
| 180 | if (condition->GetNext() != user) { |
| 181 | return false; |
Nicolas Geoffray | 360231a | 2014-10-08 21:07:48 +0100 | [diff] [blame] | 182 | } |
David Brazdil | b3e773e | 2016-01-26 11:28:37 +0000 | [diff] [blame] | 183 | |
| 184 | if (user->IsIf() || user->IsDeoptimize()) { |
| 185 | return true; |
| 186 | } |
| 187 | |
David Brazdil | 74eb1b2 | 2015-12-14 11:44:01 +0000 | [diff] [blame] | 188 | if (user->IsSelect() && user->AsSelect()->GetCondition() == condition) { |
Mark Mendell | 0c5b18e | 2016-02-06 13:58:35 -0500 | [diff] [blame] | 189 | return true; |
David Brazdil | 74eb1b2 | 2015-12-14 11:44:01 +0000 | [diff] [blame] | 190 | } |
| 191 | |
David Brazdil | b3e773e | 2016-01-26 11:28:37 +0000 | [diff] [blame] | 192 | return false; |
| 193 | } |
| 194 | |
| 195 | void PrepareForRegisterAllocation::VisitCondition(HCondition* condition) { |
| 196 | if (condition->HasOnlyOneNonEnvironmentUse()) { |
Vladimir Marko | 46817b8 | 2016-03-29 12:21:58 +0100 | [diff] [blame] | 197 | HInstruction* user = condition->GetUses().front().GetUser(); |
David Brazdil | b3e773e | 2016-01-26 11:28:37 +0000 | [diff] [blame] | 198 | if (CanEmitConditionAt(condition, user)) { |
| 199 | condition->MarkEmittedAtUseSite(); |
| 200 | } |
Nicolas Geoffray | 360231a | 2014-10-08 21:07:48 +0100 | [diff] [blame] | 201 | } |
| 202 | } |
| 203 | |
Igor Murashkin | d01745e | 2017-04-05 16:40:31 -0700 | [diff] [blame] | 204 | void PrepareForRegisterAllocation::VisitConstructorFence(HConstructorFence* constructor_fence) { |
Igor Murashkin | 79d8fa7 | 2017-04-18 09:37:23 -0700 | [diff] [blame] | 205 | // Trivially remove redundant HConstructorFence when it immediately follows an HNewInstance |
| 206 | // to an uninitialized class. In this special case, the art_quick_alloc_object_resolved |
| 207 | // will already have the 'dmb' which is strictly stronger than an HConstructorFence. |
| 208 | // |
| 209 | // The instruction builder always emits "x = HNewInstance; HConstructorFence(x)" so this |
| 210 | // is effectively pattern-matching that particular case and undoing the redundancy the builder |
| 211 | // had introduced. |
| 212 | // |
| 213 | // TODO: Move this to a separate pass. |
| 214 | HInstruction* allocation_inst = constructor_fence->GetAssociatedAllocation(); |
| 215 | if (allocation_inst != nullptr && allocation_inst->IsNewInstance()) { |
| 216 | HNewInstance* new_inst = allocation_inst->AsNewInstance(); |
| 217 | // This relies on the entrypoint already being set to the more optimized version; |
| 218 | // as that happens in this pass, this redundancy removal also cannot happen any earlier. |
| 219 | if (new_inst != nullptr && new_inst->GetEntrypoint() == kQuickAllocObjectResolved) { |
| 220 | // If this was done in an earlier pass, we would want to match that `previous` was an input |
| 221 | // to the `constructor_fence`. However, since this pass removes the inputs to the fence, |
| 222 | // we can ignore the inputs and just remove the instruction from its block. |
| 223 | DCHECK_EQ(1u, constructor_fence->InputCount()); |
| 224 | // TODO: GetAssociatedAllocation should not care about multiple inputs |
| 225 | // if we are in prepare_for_register_allocation pass only. |
| 226 | constructor_fence->GetBlock()->RemoveInstruction(constructor_fence); |
Igor Murashkin | 6ef4567 | 2017-08-08 13:59:55 -0700 | [diff] [blame] | 227 | MaybeRecordStat(stats_, |
| 228 | MethodCompilationStat::kConstructorFenceRemovedPFRA); |
Igor Murashkin | 79d8fa7 | 2017-04-18 09:37:23 -0700 | [diff] [blame] | 229 | return; |
Igor Murashkin | 79d8fa7 | 2017-04-18 09:37:23 -0700 | [diff] [blame] | 230 | } |
| 231 | |
| 232 | // HNewArray does not need this check because the art_quick_alloc_array does not itself |
| 233 | // have a dmb in any normal situation (i.e. the array class is never exactly in the |
| 234 | // "resolved" state). If the array class is not yet loaded, it will always go from |
| 235 | // Unloaded->Initialized state. |
| 236 | } |
| 237 | |
| 238 | // Remove all the inputs to the constructor fence; |
Igor Murashkin | d01745e | 2017-04-05 16:40:31 -0700 | [diff] [blame] | 239 | // they aren't used by the InstructionCodeGenerator and this lets us avoid creating a |
| 240 | // LocationSummary in the LocationsBuilder. |
| 241 | constructor_fence->RemoveAllInputs(); |
| 242 | } |
| 243 | |
Roland Levillain | 4c0eb42 | 2015-04-24 16:43:49 +0100 | [diff] [blame] | 244 | void PrepareForRegisterAllocation::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) { |
| 245 | if (invoke->IsStaticWithExplicitClinitCheck()) { |
Nicolas Geoffray | e829831 | 2017-08-03 09:51:25 +0100 | [diff] [blame] | 246 | HInstruction* last_input = invoke->GetInputs().back(); |
| 247 | DCHECK(last_input->IsLoadClass()) |
Calin Juravle | 0ba218d | 2015-05-19 18:46:01 +0100 | [diff] [blame] | 248 | << "Last input is not HLoadClass. It is " << last_input->DebugName(); |
| 249 | |
Vladimir Marko | fbb184a | 2015-11-13 14:47:00 +0000 | [diff] [blame] | 250 | // Detach the explicit class initialization check from the invoke. |
| 251 | // Keeping track of the initializing instruction is no longer required |
| 252 | // at this stage (i.e., after inlining has been performed). |
| 253 | invoke->RemoveExplicitClinitCheck(HInvokeStaticOrDirect::ClinitCheckRequirement::kNone); |
Roland Levillain | 4c0eb42 | 2015-04-24 16:43:49 +0100 | [diff] [blame] | 254 | |
Vladimir Marko | fbb184a | 2015-11-13 14:47:00 +0000 | [diff] [blame] | 255 | // Merging with load class should have happened in VisitClinitCheck(). |
| 256 | DCHECK(!CanMoveClinitCheck(last_input, invoke)); |
| 257 | } |
| 258 | } |
Nicolas Geoffray | 78f4fa7 | 2015-06-12 09:35:05 +0100 | [diff] [blame] | 259 | |
David Brazdil | b3e773e | 2016-01-26 11:28:37 +0000 | [diff] [blame] | 260 | bool PrepareForRegisterAllocation::CanMoveClinitCheck(HInstruction* input, |
| 261 | HInstruction* user) const { |
Vladimir Marko | fbb184a | 2015-11-13 14:47:00 +0000 | [diff] [blame] | 262 | // Determine if input and user come from the same dex instruction, so that we can move |
| 263 | // the clinit check responsibility from one to the other, i.e. from HClinitCheck (user) |
Vladimir Marko | c7591b4 | 2016-06-29 14:59:07 +0100 | [diff] [blame] | 264 | // to HLoadClass (input), or from HClinitCheck (input) to HInvokeStaticOrDirect (user), |
| 265 | // or from HLoadClass (input) to HNewInstance (user). |
Vladimir Marko | fbb184a | 2015-11-13 14:47:00 +0000 | [diff] [blame] | 266 | |
| 267 | // Start with a quick dex pc check. |
| 268 | if (user->GetDexPc() != input->GetDexPc()) { |
| 269 | return false; |
| 270 | } |
| 271 | |
| 272 | // Now do a thorough environment check that this is really coming from the same instruction in |
| 273 | // the same inlined graph. Unfortunately, we have to go through the whole environment chain. |
| 274 | HEnvironment* user_environment = user->GetEnvironment(); |
| 275 | HEnvironment* input_environment = input->GetEnvironment(); |
| 276 | while (user_environment != nullptr || input_environment != nullptr) { |
| 277 | if (user_environment == nullptr || input_environment == nullptr) { |
| 278 | // Different environment chain length. This happens when a method is called |
| 279 | // once directly and once indirectly through another inlined method. |
| 280 | return false; |
| 281 | } |
| 282 | if (user_environment->GetDexPc() != input_environment->GetDexPc() || |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 283 | user_environment->GetMethod() != input_environment->GetMethod()) { |
Vladimir Marko | fbb184a | 2015-11-13 14:47:00 +0000 | [diff] [blame] | 284 | return false; |
| 285 | } |
| 286 | user_environment = user_environment->GetParent(); |
| 287 | input_environment = input_environment->GetParent(); |
| 288 | } |
| 289 | |
| 290 | // Check for code motion taking the input to a different block. |
| 291 | if (user->GetBlock() != input->GetBlock()) { |
| 292 | return false; |
| 293 | } |
| 294 | |
| 295 | // In debug mode, check that we have not inserted a throwing instruction |
| 296 | // or an instruction with side effects between input and user. |
| 297 | if (kIsDebugBuild) { |
| 298 | for (HInstruction* between = input->GetNext(); between != user; between = between->GetNext()) { |
| 299 | CHECK(between != nullptr); // User must be after input in the same block. |
| 300 | CHECK(!between->CanThrow()); |
| 301 | CHECK(!between->HasSideEffects()); |
Roland Levillain | 4c0eb42 | 2015-04-24 16:43:49 +0100 | [diff] [blame] | 302 | } |
| 303 | } |
Vladimir Marko | fbb184a | 2015-11-13 14:47:00 +0000 | [diff] [blame] | 304 | return true; |
Roland Levillain | 4c0eb42 | 2015-04-24 16:43:49 +0100 | [diff] [blame] | 305 | } |
| 306 | |
Nicolas Geoffray | acc56ac | 2018-10-09 08:45:24 +0100 | [diff] [blame] | 307 | void PrepareForRegisterAllocation::VisitTypeConversion(HTypeConversion* instruction) { |
| 308 | // For simplicity, our code generators don't handle implicit type conversion, so ensure |
| 309 | // there are none before hitting codegen. |
| 310 | if (instruction->IsImplicitConversion()) { |
| 311 | instruction->ReplaceWith(instruction->GetInput()); |
| 312 | instruction->GetBlock()->RemoveInstruction(instruction); |
| 313 | } |
| 314 | } |
| 315 | |
Nicolas Geoffray | 26a25ef | 2014-09-30 13:54:09 +0100 | [diff] [blame] | 316 | } // namespace art |