/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "register_allocation_resolver.h"

#include "base/bit_vector-inl.h"
#include "code_generator.h"
#include "linear_order.h"
#include "ssa_liveness_analysis.h"

namespace art {

RegisterAllocationResolver::RegisterAllocationResolver(CodeGenerator* codegen,
                                                       const SsaLivenessAnalysis& liveness)
    : allocator_(codegen->GetGraph()->GetAllocator()),
      codegen_(codegen),
      liveness_(liveness) {}

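// Resolution is the phase after allocation proper: with the frame size and all
// spill slot counts known, it rewrites instruction, phi, and temp locations,
// and inserts the moves that connect split siblings and basic blocks.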
void RegisterAllocationResolver::Resolve(ArrayRef<HInstruction* const> safepoints,
                                         size_t reserved_out_slots,
                                         size_t int_spill_slots,
                                         size_t long_spill_slots,
                                         size_t float_spill_slots,
                                         size_t double_spill_slots,
                                         size_t catch_phi_spill_slots,
                                         ArrayRef<LiveInterval* const> temp_intervals) {
  size_t spill_slots = int_spill_slots
                     + long_spill_slots
                     + float_spill_slots
                     + double_spill_slots
                     + catch_phi_spill_slots;

  // Update safepoints and calculate the size of the spills.
  UpdateSafepointLiveRegisters();
  size_t maximum_safepoint_spill_size = CalculateMaximumSafepointSpillSize(safepoints);

  // Computes frame size and spill mask.
  codegen_->InitializeCodeGeneration(spill_slots,
                                     maximum_safepoint_spill_size,
                                     reserved_out_slots,  // Includes slot(s) for the art method.
                                     codegen_->GetGraph()->GetLinearOrder());

  // Resolve outputs, including stack locations.
  // TODO: Use pointers to Location inside LiveInterval to avoid doing another iteration.
  for (size_t i = 0, e = liveness_.GetNumberOfSsaValues(); i < e; ++i) {
    HInstruction* instruction = liveness_.GetInstructionFromSsaIndex(i);
    LiveInterval* current = instruction->GetLiveInterval();
    LocationSummary* locations = instruction->GetLocations();
    Location location = locations->Out();
    if (instruction->IsParameterValue()) {
      // Now that we know the frame size, adjust the parameter's location.
      if (location.IsStackSlot()) {
        location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
        current->SetSpillSlot(location.GetStackIndex());
        locations->UpdateOut(location);
      } else if (location.IsDoubleStackSlot()) {
        location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
        current->SetSpillSlot(location.GetStackIndex());
        locations->UpdateOut(location);
      } else if (current->HasSpillSlot()) {
        current->SetSpillSlot(current->GetSpillSlot() + codegen_->GetFrameSize());
      }
    } else if (instruction->IsCurrentMethod()) {
      // The current method is always at offset 0.
      DCHECK(!current->HasSpillSlot() || (current->GetSpillSlot() == 0));
    } else if (instruction->IsPhi() && instruction->AsPhi()->IsCatchPhi()) {
      DCHECK(current->HasSpillSlot());
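      // Catch phi spill slots are laid out above all other spill slots (see the
      // stack layout below), so rebase the catch phi index past the out slots
      // and the non-catch-phi spill slots.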
      size_t slot = current->GetSpillSlot()
                    + spill_slots
                    + reserved_out_slots
                    - catch_phi_spill_slots;
      current->SetSpillSlot(slot * kVRegSize);
    } else if (current->HasSpillSlot()) {
      // Adjust the stack slot, now that we know the number of spill slots of each type.
      // The way this implementation lays out the stack is the following:
      // [parameter slots       ]
      // [art method (caller)   ]
      // [entry spill (core)    ]
      // [entry spill (float)   ]
      // [should_deoptimize flag] (this is optional)
      // [catch phi spill slots ]
      // [double spill slots    ]
      // [long spill slots      ]
      // [float spill slots     ]
      // [int/ref values        ]
      // [maximum out values    ] (number of arguments for calls)
      // [art method            ].
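      // The fall-throughs in the switch below make the offset cumulative: each
      // interval's slot index is bumped past every slot group laid out below
      // its own group in the layout above.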
      size_t slot = current->GetSpillSlot();
      switch (current->GetType()) {
        case DataType::Type::kFloat64:
          slot += long_spill_slots;
          FALLTHROUGH_INTENDED;
        case DataType::Type::kUint64:
        case DataType::Type::kInt64:
          slot += float_spill_slots;
          FALLTHROUGH_INTENDED;
        case DataType::Type::kFloat32:
          slot += int_spill_slots;
          FALLTHROUGH_INTENDED;
        case DataType::Type::kReference:
        case DataType::Type::kUint32:
        case DataType::Type::kInt32:
        case DataType::Type::kUint16:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kBool:
        case DataType::Type::kInt16:
          slot += reserved_out_slots;
          break;
        case DataType::Type::kVoid:
          LOG(FATAL) << "Unexpected type for interval " << current->GetType();
      }
      current->SetSpillSlot(slot * kVRegSize);
    }

    Location source = current->ToLocation();

    if (location.IsUnallocated()) {
      if (location.GetPolicy() == Location::kSameAsFirstInput) {
        if (locations->InAt(0).IsUnallocated()) {
          locations->SetInAt(0, source);
        } else {
          DCHECK(locations->InAt(0).Equals(source));
        }
      }
      locations->UpdateOut(source);
    } else {
      DCHECK(source.Equals(location));
    }
  }

  // Connect siblings and resolve inputs.
  for (size_t i = 0, e = liveness_.GetNumberOfSsaValues(); i < e; ++i) {
    HInstruction* instruction = liveness_.GetInstructionFromSsaIndex(i);
    ConnectSiblings(instruction->GetLiveInterval());
  }

  // Resolve non-linear control flow across branches. Order does not matter.
  for (HBasicBlock* block : codegen_->GetGraph()->GetLinearOrder()) {
    if (block->IsCatchBlock() ||
        (block->IsLoopHeader() && block->GetLoopInformation()->IsIrreducible())) {
      // Instructions live at the top of catch blocks or irreducible loop headers
      // were forced to spill.
      if (kIsDebugBuild) {
        BitVector* live = liveness_.GetLiveInSet(*block);
        for (uint32_t idx : live->Indexes()) {
          LiveInterval* interval = liveness_.GetInstructionFromSsaIndex(idx)->GetLiveInterval();
          LiveInterval* sibling = interval->GetSiblingAt(block->GetLifetimeStart());
          // `GetSiblingAt` returns the sibling that contains a position, but there could be
          // a lifetime hole in it. `CoversSlow` returns whether the interval is live at that
          // position.
          if ((sibling != nullptr) && sibling->CoversSlow(block->GetLifetimeStart())) {
            DCHECK(!sibling->HasRegister());
          }
        }
      }
    } else {
      BitVector* live = liveness_.GetLiveInSet(*block);
      for (uint32_t idx : live->Indexes()) {
        LiveInterval* interval = liveness_.GetInstructionFromSsaIndex(idx)->GetLiveInterval();
        for (HBasicBlock* predecessor : block->GetPredecessors()) {
          ConnectSplitSiblings(interval, predecessor, block);
        }
      }
    }
  }

  // Resolve phi inputs. Order does not matter.
  for (HBasicBlock* block : codegen_->GetGraph()->GetLinearOrder()) {
    if (block->IsCatchBlock()) {
      // Catch phi values are set at runtime by the exception delivery mechanism.
    } else {
      for (HInstructionIterator inst_it(block->GetPhis()); !inst_it.Done(); inst_it.Advance()) {
        HInstruction* phi = inst_it.Current();
        for (size_t i = 0, e = block->GetPredecessors().size(); i < e; ++i) {
          HBasicBlock* predecessor = block->GetPredecessors()[i];
          DCHECK_EQ(predecessor->GetNormalSuccessors().size(), 1u);
          HInstruction* input = phi->InputAt(i);
          Location source = input->GetLiveInterval()->GetLocationAt(
              predecessor->GetLifetimeEnd() - 1);
          Location destination = phi->GetLiveInterval()->ToLocation();
          InsertParallelMoveAtExitOf(predecessor, phi, source, destination);
        }
      }
    }
  }

  // Resolve temp locations.
  for (LiveInterval* temp : temp_intervals) {
    if (temp->IsHighInterval()) {
      // High intervals can be skipped, they are already handled by the low interval.
      continue;
    }
    HInstruction* at = liveness_.GetTempUser(temp);
    size_t temp_index = liveness_.GetTempIndex(temp);
    LocationSummary* locations = at->GetLocations();
    switch (temp->GetType()) {
      case DataType::Type::kInt32:
        locations->SetTempAt(temp_index, Location::RegisterLocation(temp->GetRegister()));
        break;

      case DataType::Type::kFloat64:
        if (codegen_->NeedsTwoRegisters(DataType::Type::kFloat64)) {
          Location location = Location::FpuRegisterPairLocation(
              temp->GetRegister(), temp->GetHighInterval()->GetRegister());
          locations->SetTempAt(temp_index, location);
        } else {
          locations->SetTempAt(temp_index, Location::FpuRegisterLocation(temp->GetRegister()));
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type for temporary location "
                   << temp->GetType();
    }
  }
}

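// For each safepoint covered by a live interval sibling that sits in a register,
// record that register in the safepoint's LocationSummary.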
void RegisterAllocationResolver::UpdateSafepointLiveRegisters() {
  for (size_t i = 0, e = liveness_.GetNumberOfSsaValues(); i < e; ++i) {
    HInstruction* instruction = liveness_.GetInstructionFromSsaIndex(i);
    for (LiveInterval* current = instruction->GetLiveInterval();
         current != nullptr;
         current = current->GetNextSibling()) {
      if (!current->HasRegister()) {
        continue;
      }
      Location source = current->ToLocation();
      for (SafepointPosition* safepoint_position = current->GetFirstSafepoint();
           safepoint_position != nullptr;
           safepoint_position = safepoint_position->GetNext()) {
        DCHECK(current->CoversSlow(safepoint_position->GetPosition()));
        LocationSummary* locations = safepoint_position->GetLocations();
        switch (source.GetKind()) {
          case Location::kRegister:
          case Location::kFpuRegister: {
            locations->AddLiveRegister(source);
            break;
          }
          case Location::kRegisterPair:
          case Location::kFpuRegisterPair: {
            locations->AddLiveRegister(source.ToLow());
            locations->AddLiveRegister(source.ToHigh());
            break;
          }
          case Location::kStackSlot:  // Fall-through
          case Location::kDoubleStackSlot:  // Fall-through
          case Location::kConstant: {
            // Nothing to do.
            break;
          }
          default: {
            LOG(FATAL) << "Unexpected location for object";
          }
        }
      }
    }
  }
}

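// Compute the largest spill size any slow-path safepoint may need for saving
// its live caller-save registers; the frame reserves this space once, shared
// by all slow paths.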
size_t RegisterAllocationResolver::CalculateMaximumSafepointSpillSize(
    ArrayRef<HInstruction* const> safepoints) {
  size_t core_register_spill_size = codegen_->GetWordSize();
  size_t fp_register_spill_size = codegen_->GetFloatingPointSpillSlotSize();
  size_t maximum_safepoint_spill_size = 0u;
  for (HInstruction* instruction : safepoints) {
    LocationSummary* locations = instruction->GetLocations();
    if (locations->OnlyCallsOnSlowPath()) {
      size_t core_spills =
          codegen_->GetNumberOfSlowPathSpills(locations, /* core_registers */ true);
      size_t fp_spills =
          codegen_->GetNumberOfSlowPathSpills(locations, /* core_registers */ false);
      size_t spill_size =
          core_register_spill_size * core_spills + fp_register_spill_size * fp_spills;
      maximum_safepoint_spill_size = std::max(maximum_safepoint_spill_size, spill_size);
    } else if (locations->CallsOnMainAndSlowPath()) {
      // Nothing to spill on the slow path if the main path already clobbers caller-saves.
      DCHECK_EQ(0u, codegen_->GetNumberOfSlowPathSpills(locations, /* core_registers */ true));
      DCHECK_EQ(0u, codegen_->GetNumberOfSlowPathSpills(locations, /* core_registers */ false));
    }
  }
  return maximum_safepoint_spill_size;
}

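// Walk all siblings of `interval` in order: spill eagerly at the definition if
// needed, update the locations of the uses and environment uses covered by each
// sibling, insert moves between adjacent siblings allocated to different
// locations, and record stack/register bits at safepoints for references.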
void RegisterAllocationResolver::ConnectSiblings(LiveInterval* interval) {
  LiveInterval* current = interval;
  if (current->HasSpillSlot()
      && current->HasRegister()
      // Currently, we unconditionally spill the current method in the code generators.
      && !interval->GetDefinedBy()->IsCurrentMethod()) {
    // We spill eagerly, so the move must be at the definition.
    Location loc;
    switch (interval->NumberOfSpillSlotsNeeded()) {
      case 1: loc = Location::StackSlot(interval->GetParent()->GetSpillSlot()); break;
      case 2: loc = Location::DoubleStackSlot(interval->GetParent()->GetSpillSlot()); break;
      case 4: loc = Location::SIMDStackSlot(interval->GetParent()->GetSpillSlot()); break;
      default: LOG(FATAL) << "Unexpected number of spill slots"; UNREACHABLE();
    }
    InsertMoveAfter(interval->GetDefinedBy(), interval->ToLocation(), loc);
  }
  UsePositionList::const_iterator use_it = current->GetUses().begin();
  const UsePositionList::const_iterator use_end = current->GetUses().end();
  EnvUsePositionList::const_iterator env_use_it = current->GetEnvironmentUses().begin();
  const EnvUsePositionList::const_iterator env_use_end = current->GetEnvironmentUses().end();

  // Walk over all siblings, updating locations of use positions, and
  // connecting them when they are adjacent.
  do {
    Location source = current->ToLocation();

    // Walk over all uses covered by this interval, and update the location
    // information.

    LiveRange* range = current->GetFirstRange();
    while (range != nullptr) {
      // Process uses in the closed interval [range->GetStart(), range->GetEnd()].
      // FindMatchingUseRange() expects a half-open interval, so pass `range->GetEnd() + 1u`.
      size_t range_begin = range->GetStart();
      size_t range_end = range->GetEnd() + 1u;
      auto matching_use_range =
          FindMatchingUseRange(use_it, use_end, range_begin, range_end);
      DCHECK(std::all_of(use_it,
                         matching_use_range.begin(),
                         [](const UsePosition& pos) { return pos.IsSynthesized(); }));
      for (const UsePosition& use : matching_use_range) {
        DCHECK(current->CoversSlow(use.GetPosition()) || (use.GetPosition() == range->GetEnd()));
        if (!use.IsSynthesized()) {
          LocationSummary* locations = use.GetUser()->GetLocations();
          Location expected_location = locations->InAt(use.GetInputIndex());
          // The expected (actual) location may be invalid in case the input is unused. Currently
          // this only happens for intrinsics.
          if (expected_location.IsValid()) {
            if (expected_location.IsUnallocated()) {
              locations->SetInAt(use.GetInputIndex(), source);
            } else if (!expected_location.IsConstant()) {
              AddInputMoveFor(
                  interval->GetDefinedBy(), use.GetUser(), source, expected_location);
            }
          } else {
            DCHECK(use.GetUser()->IsInvoke());
            DCHECK(use.GetUser()->AsInvoke()->GetIntrinsic() != Intrinsics::kNone);
          }
        }
      }
      use_it = matching_use_range.end();

      // Walk over the environment uses, and update their locations.
      auto matching_env_use_range =
          FindMatchingUseRange(env_use_it, env_use_end, range_begin, range_end);
      for (const EnvUsePosition& env_use : matching_env_use_range) {
        DCHECK(current->CoversSlow(env_use.GetPosition())
               || (env_use.GetPosition() == range->GetEnd()));
        HEnvironment* environment = env_use.GetEnvironment();
        environment->SetLocationAt(env_use.GetInputIndex(), source);
      }
      env_use_it = matching_env_use_range.end();

      range = range->GetNext();
    }

    // If the next interval starts just after this one, and has a register,
    // insert a move.
    LiveInterval* next_sibling = current->GetNextSibling();
    if (next_sibling != nullptr
        && next_sibling->HasRegister()
        && current->GetEnd() == next_sibling->GetStart()) {
      Location destination = next_sibling->ToLocation();
      InsertParallelMoveAt(current->GetEnd(), interval->GetDefinedBy(), source, destination);
    }

    for (SafepointPosition* safepoint_position = current->GetFirstSafepoint();
         safepoint_position != nullptr;
         safepoint_position = safepoint_position->GetNext()) {
      DCHECK(current->CoversSlow(safepoint_position->GetPosition()));

      if (current->GetType() == DataType::Type::kReference) {
        DCHECK(interval->GetDefinedBy()->IsActualObject())
            << interval->GetDefinedBy()->DebugName()
            << '(' << interval->GetDefinedBy()->GetId() << ')'
            << "@" << safepoint_position->GetInstruction()->DebugName()
            << '(' << safepoint_position->GetInstruction()->GetId() << ')';
        LocationSummary* locations = safepoint_position->GetLocations();
        if (current->GetParent()->HasSpillSlot()) {
          locations->SetStackBit(current->GetParent()->GetSpillSlot() / kVRegSize);
        }
        if (source.GetKind() == Location::kRegister) {
          locations->SetRegisterBit(source.reg());
        }
      }
    }
    current = next_sibling;
  } while (current != nullptr);

  // Any remaining uses can only be synthesized uses.
  DCHECK(std::all_of(use_it, use_end, [](const UsePosition& pos) { return pos.IsSynthesized(); }));
}

static bool IsMaterializableEntryBlockInstructionOfGraphWithIrreducibleLoop(
    HInstruction* instruction) {
  return instruction->GetBlock()->GetGraph()->HasIrreducibleLoops() &&
         (instruction->IsConstant() || instruction->IsCurrentMethod());
}

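// Insert, on the control-flow edge from `from` to `to`, the move required when
// the sibling live at the end of `from` and the sibling live at the start of
// `to` were allocated different locations.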
void RegisterAllocationResolver::ConnectSplitSiblings(LiveInterval* interval,
                                                      HBasicBlock* from,
                                                      HBasicBlock* to) const {
  if (interval->GetNextSibling() == nullptr) {
    // Nothing to connect. The whole range was allocated to the same location.
    return;
  }

  // Find the intervals that cover `from` and `to`.
  size_t destination_position = to->GetLifetimeStart();
  size_t source_position = from->GetLifetimeEnd() - 1;
  LiveInterval* destination = interval->GetSiblingAt(destination_position);
  LiveInterval* source = interval->GetSiblingAt(source_position);

  if (destination == source) {
    // Interval was not split.
    return;
  }

  LiveInterval* parent = interval->GetParent();
  HInstruction* defined_by = parent->GetDefinedBy();
  if (codegen_->GetGraph()->HasIrreducibleLoops() &&
      (destination == nullptr || !destination->CoversSlow(destination_position))) {
    // Our live_in fixed point calculation has found that the instruction is live
    // in the `to` block because it will eventually enter an irreducible loop. Our
    // live interval computation however does not compute a fixed point, and
    // therefore will not have a location for that instruction for `to`.
    // Because the instruction is a constant or the ArtMethod, we don't need to
    // do anything: it will be materialized in the irreducible loop.
    DCHECK(IsMaterializableEntryBlockInstructionOfGraphWithIrreducibleLoop(defined_by))
        << defined_by->DebugName() << ":" << defined_by->GetId()
        << " " << from->GetBlockId() << " -> " << to->GetBlockId();
    return;
  }

  if (!destination->HasRegister()) {
    // Values are eagerly spilled, so the spill slot already contains the appropriate value.
    return;
  }

  Location location_source;
  // `GetSiblingAt` returns the interval whose start and end cover `position`,
  // but does not check whether the interval is inactive at that position.
  // The only situation where the interval is inactive at that position is in the
  // presence of irreducible loops for constants and ArtMethod.
  if (codegen_->GetGraph()->HasIrreducibleLoops() &&
      (source == nullptr || !source->CoversSlow(source_position))) {
    DCHECK(IsMaterializableEntryBlockInstructionOfGraphWithIrreducibleLoop(defined_by));
    if (defined_by->IsConstant()) {
      location_source = defined_by->GetLocations()->Out();
    } else {
      DCHECK(defined_by->IsCurrentMethod());
      switch (parent->NumberOfSpillSlotsNeeded()) {
        case 1: location_source = Location::StackSlot(parent->GetSpillSlot()); break;
        case 2: location_source = Location::DoubleStackSlot(parent->GetSpillSlot()); break;
        case 4: location_source = Location::SIMDStackSlot(parent->GetSpillSlot()); break;
        default: LOG(FATAL) << "Unexpected number of spill slots"; UNREACHABLE();
      }
    }
  } else {
    DCHECK(source != nullptr);
    DCHECK(source->CoversSlow(source_position));
    DCHECK(destination->CoversSlow(destination_position));
    location_source = source->ToLocation();
  }

  // If `from` has only one successor, we can put the moves at its exit. Otherwise
  // we need to put the moves at the entry of `to`.
  if (from->GetNormalSuccessors().size() == 1) {
    InsertParallelMoveAtExitOf(from,
                               defined_by,
                               location_source,
                               destination->ToLocation());
  } else {
    DCHECK_EQ(to->GetPredecessors().size(), 1u);
    InsertParallelMoveAtEntryOf(to,
                                defined_by,
                                location_source,
                                destination->ToLocation());
  }
}

static bool IsValidDestination(Location destination) {
  return destination.IsRegister()
      || destination.IsRegisterPair()
      || destination.IsFpuRegister()
      || destination.IsFpuRegisterPair()
      || destination.IsStackSlot()
      || destination.IsDoubleStackSlot()
      || destination.IsSIMDStackSlot();
}

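// Append a move to `move`, splitting a 64-bit move into its two 32-bit halves
// on targets where long values live in register pairs. Long constants are left
// whole, since the parallel move resolver knows how to handle them.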
void RegisterAllocationResolver::AddMove(HParallelMove* move,
                                         Location source,
                                         Location destination,
                                         HInstruction* instruction,
                                         DataType::Type type) const {
  if (type == DataType::Type::kInt64
      && codegen_->ShouldSplitLongMoves()
      // The parallel move resolver knows how to deal with long constants.
      && !source.IsConstant()) {
    move->AddMove(source.ToLow(), destination.ToLow(), DataType::Type::kInt32, instruction);
    move->AddMove(source.ToHigh(), destination.ToHigh(), DataType::Type::kInt32, nullptr);
  } else {
    move->AddMove(source, destination, type, instruction);
  }
}

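// Emit a move that brings `input` into the location `user` expects, placed in
// a parallel move immediately before `user`; an existing parallel move at the
// same lifetime position is reused.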
void RegisterAllocationResolver::AddInputMoveFor(HInstruction* input,
                                                 HInstruction* user,
                                                 Location source,
                                                 Location destination) const {
  if (source.Equals(destination)) return;

  DCHECK(!user->IsPhi());

  HInstruction* previous = user->GetPrevious();
  HParallelMove* move = nullptr;
  if (previous == nullptr
      || !previous->IsParallelMove()
      || previous->GetLifetimePosition() < user->GetLifetimePosition()) {
    move = new (allocator_) HParallelMove(allocator_);
    move->SetLifetimePosition(user->GetLifetimePosition());
    user->GetBlock()->InsertInstructionBefore(move, user);
  } else {
    move = previous->AsParallelMove();
  }
  DCHECK_EQ(move->GetLifetimePosition(), user->GetLifetimePosition());
  AddMove(move, source, destination, nullptr, input->GetType());
}

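// Lifetime positions come in pairs: an even position is the start of an
// instruction, the following odd position its end.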
static bool IsInstructionStart(size_t position) {
  return (position & 1) == 0;
}

static bool IsInstructionEnd(size_t position) {
  return (position & 1) == 1;
}

void RegisterAllocationResolver::InsertParallelMoveAt(size_t position,
                                                      HInstruction* instruction,
                                                      Location source,
                                                      Location destination) const {
  DCHECK(IsValidDestination(destination)) << destination;
  if (source.Equals(destination)) return;

  HInstruction* at = liveness_.GetInstructionFromPosition(position / 2);
  HParallelMove* move;
  if (at == nullptr) {
    if (IsInstructionStart(position)) {
      // Block boundary; don't do anything, the connection of split siblings will handle it.
      return;
    } else {
      // Move must happen before the first instruction of the block.
      at = liveness_.GetInstructionFromPosition((position + 1) / 2);
      // Note that parallel moves may have already been inserted, so we explicitly
      // ask for the first instruction of the block: `GetInstructionFromPosition` does
      // not contain the `HParallelMove` instructions.
      at = at->GetBlock()->GetFirstInstruction();

      if (at->GetLifetimePosition() < position) {
        // We may insert moves for split siblings and phi spills at the beginning of the block.
        // Since this is a different lifetime position, we need to go to the next instruction.
        DCHECK(at->IsParallelMove());
        at = at->GetNext();
      }

      if (at->GetLifetimePosition() != position) {
        DCHECK_GT(at->GetLifetimePosition(), position);
        move = new (allocator_) HParallelMove(allocator_);
        move->SetLifetimePosition(position);
        at->GetBlock()->InsertInstructionBefore(move, at);
      } else {
        DCHECK(at->IsParallelMove());
        move = at->AsParallelMove();
      }
    }
  } else if (IsInstructionEnd(position)) {
    // Move must happen after the instruction.
    DCHECK(!at->IsControlFlow());
    move = at->GetNext()->AsParallelMove();
    // This is a parallel move for connecting siblings in the same block. We need to
    // differentiate it from moves for connecting blocks, and from input moves.
    if (move == nullptr || move->GetLifetimePosition() > position) {
      move = new (allocator_) HParallelMove(allocator_);
      move->SetLifetimePosition(position);
      at->GetBlock()->InsertInstructionBefore(move, at->GetNext());
    }
  } else {
    // Move must happen before the instruction.
    HInstruction* previous = at->GetPrevious();
    if (previous == nullptr
        || !previous->IsParallelMove()
        || previous->GetLifetimePosition() != position) {
      // If the previous is a parallel move, then its position must be lower
      // than the given `position`: it was added just after the non-parallel
      // move instruction that precedes `instruction`.
      DCHECK(previous == nullptr
             || !previous->IsParallelMove()
             || previous->GetLifetimePosition() < position);
      move = new (allocator_) HParallelMove(allocator_);
      move->SetLifetimePosition(position);
      at->GetBlock()->InsertInstructionBefore(move, at);
    } else {
      move = previous->AsParallelMove();
    }
  }
  DCHECK_EQ(move->GetLifetimePosition(), position);
  AddMove(move, source, destination, instruction, instruction->GetType());
}

void RegisterAllocationResolver::InsertParallelMoveAtExitOf(HBasicBlock* block,
                                                            HInstruction* instruction,
                                                            Location source,
                                                            Location destination) const {
  DCHECK(IsValidDestination(destination)) << destination;
  if (source.Equals(destination)) return;

  DCHECK_EQ(block->GetNormalSuccessors().size(), 1u);
  HInstruction* last = block->GetLastInstruction();
  // We insert moves at exit for phi predecessors and connecting blocks.
  // A block ending with an if or a packed switch cannot branch to a block
  // with phis, because we do not allow critical edges. Nor can it connect
  // a split interval between two blocks: the move has to happen in the successor.
  DCHECK(!last->IsIf() && !last->IsPackedSwitch());
  HInstruction* previous = last->GetPrevious();
  HParallelMove* move;
  // This is a parallel move for connecting blocks. We need to differentiate
  // it from moves for connecting siblings in the same block, and from output moves.
  size_t position = last->GetLifetimePosition();
  if (previous == nullptr || !previous->IsParallelMove()
      || previous->AsParallelMove()->GetLifetimePosition() != position) {
    move = new (allocator_) HParallelMove(allocator_);
    move->SetLifetimePosition(position);
    block->InsertInstructionBefore(move, last);
  } else {
    move = previous->AsParallelMove();
  }
  AddMove(move, source, destination, instruction, instruction->GetType());
}

void RegisterAllocationResolver::InsertParallelMoveAtEntryOf(HBasicBlock* block,
                                                             HInstruction* instruction,
                                                             Location source,
                                                             Location destination) const {
  DCHECK(IsValidDestination(destination)) << destination;
  if (source.Equals(destination)) return;

  HInstruction* first = block->GetFirstInstruction();
  HParallelMove* move = first->AsParallelMove();
  size_t position = block->GetLifetimeStart();
  // This is a parallel move for connecting blocks. We need to differentiate
  // it from moves for connecting siblings in the same block, and from input moves.
  if (move == nullptr || move->GetLifetimePosition() != position) {
    move = new (allocator_) HParallelMove(allocator_);
    move->SetLifetimePosition(position);
    block->InsertInstructionBefore(move, first);
  }
  AddMove(move, source, destination, instruction, instruction->GetType());
}

void RegisterAllocationResolver::InsertMoveAfter(HInstruction* instruction,
                                                 Location source,
                                                 Location destination) const {
  DCHECK(IsValidDestination(destination)) << destination;
  if (source.Equals(destination)) return;

  if (instruction->IsPhi()) {
    InsertParallelMoveAtEntryOf(instruction->GetBlock(), instruction, source, destination);
    return;
  }

  size_t position = instruction->GetLifetimePosition() + 1;
  HParallelMove* move = instruction->GetNext()->AsParallelMove();
  // This is a parallel move for moving the output of an instruction. We need
  // to differentiate it from input moves, moves for connecting siblings in the
  // same block, and moves for connecting blocks.
  if (move == nullptr || move->GetLifetimePosition() != position) {
    move = new (allocator_) HParallelMove(allocator_);
    move->SetLifetimePosition(position);
    instruction->GetBlock()->InsertInstructionBefore(move, instruction->GetNext());
  }
  AddMove(move, source, destination, instruction, instruction->GetType());
}

}  // namespace art