/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "register_allocator.h"

#include <iostream>
#include <sstream>

#include "base/bit_vector-inl.h"
#include "code_generator.h"
#include "ssa_liveness_analysis.h"

namespace art {

static constexpr size_t kMaxLifetimePosition = -1;
static constexpr size_t kDefaultNumberOfSpillSlots = 4;

// For simplicity, we implement register pairs as (reg, reg + 1).
// Note that this is a requirement for double registers on ARM, since we
// allocate SRegister.
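// For example, a double allocated to the ARM register D0 is modelled as the
// pair (S0, S1): S0 is the low half and GetHighForLowRegister(0) yields S1.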
static int GetHighForLowRegister(int reg) { return reg + 1; }
static bool IsLowRegister(int reg) { return (reg & 1) == 0; }
static bool IsLowOfUnalignedPairInterval(LiveInterval* low) {
  return GetHighForLowRegister(low->GetRegister()) != low->GetHighInterval()->GetRegister();
}

RegisterAllocator::RegisterAllocator(ArenaAllocator* allocator,
                                     CodeGenerator* codegen,
                                     const SsaLivenessAnalysis& liveness)
      : allocator_(allocator),
        codegen_(codegen),
        liveness_(liveness),
        unhandled_core_intervals_(allocator, 0),
        unhandled_fp_intervals_(allocator, 0),
        unhandled_(nullptr),
        handled_(allocator, 0),
        active_(allocator, 0),
        inactive_(allocator, 0),
        physical_core_register_intervals_(allocator, codegen->GetNumberOfCoreRegisters()),
        physical_fp_register_intervals_(allocator, codegen->GetNumberOfFloatingPointRegisters()),
        temp_intervals_(allocator, 4),
        int_spill_slots_(allocator, kDefaultNumberOfSpillSlots),
        long_spill_slots_(allocator, kDefaultNumberOfSpillSlots),
        float_spill_slots_(allocator, kDefaultNumberOfSpillSlots),
        double_spill_slots_(allocator, kDefaultNumberOfSpillSlots),
        safepoints_(allocator, 0),
        processing_core_registers_(false),
        number_of_registers_(-1),
        registers_array_(nullptr),
        blocked_core_registers_(codegen->GetBlockedCoreRegisters()),
        blocked_fp_registers_(codegen->GetBlockedFloatingPointRegisters()),
        reserved_out_slots_(0),
        maximum_number_of_live_core_registers_(0),
        maximum_number_of_live_fp_registers_(0) {
  static constexpr bool kIsBaseline = false;
  codegen->SetupBlockedRegisters(kIsBaseline);
  physical_core_register_intervals_.SetSize(codegen->GetNumberOfCoreRegisters());
  physical_fp_register_intervals_.SetSize(codegen->GetNumberOfFloatingPointRegisters());
  // Always reserve for the current method and the graph's max out registers.
  // TODO: compute it instead.
  reserved_out_slots_ = 1 + codegen->GetGraph()->GetMaximumNumberOfOutVRegs();
}

bool RegisterAllocator::CanAllocateRegistersFor(const HGraph& graph ATTRIBUTE_UNUSED,
                                                InstructionSet instruction_set) {
  return instruction_set == kArm64
      || instruction_set == kX86_64
      || instruction_set == kArm
      || instruction_set == kX86
      || instruction_set == kThumb2;
}

static bool ShouldProcess(bool processing_core_registers, LiveInterval* interval) {
  if (interval == nullptr) return false;
  bool is_core_register = (interval->GetType() != Primitive::kPrimDouble)
      && (interval->GetType() != Primitive::kPrimFloat);
  return processing_core_registers == is_core_register;
}

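// AllocateRegisters() below is the entry point. A minimal usage sketch
// (hypothetical driver code; the real caller lives in the compiler pipeline)
// assuming a built `graph`, its `codegen`, and an already-run `liveness`:
//
//   RegisterAllocator register_allocator(graph->GetArena(), codegen, liveness);
//   register_allocator.AllocateRegisters();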
void RegisterAllocator::AllocateRegisters() {
  AllocateRegistersInternal();
  Resolve();

  if (kIsDebugBuild) {
    processing_core_registers_ = true;
    ValidateInternal(true);
    processing_core_registers_ = false;
    ValidateInternal(true);
    // Check that the linear order is still correct with regard to lifetime positions.
    // Since only parallel moves have been inserted during the register allocation,
    // these checks are mostly for making sure these moves have been added correctly.
    size_t current_liveness = 0;
    for (HLinearOrderIterator it(*codegen_->GetGraph()); !it.Done(); it.Advance()) {
      HBasicBlock* block = it.Current();
      for (HInstructionIterator inst_it(block->GetPhis()); !inst_it.Done(); inst_it.Advance()) {
        HInstruction* instruction = inst_it.Current();
        DCHECK_LE(current_liveness, instruction->GetLifetimePosition());
        current_liveness = instruction->GetLifetimePosition();
      }
      for (HInstructionIterator inst_it(block->GetInstructions());
           !inst_it.Done();
           inst_it.Advance()) {
        HInstruction* instruction = inst_it.Current();
        DCHECK_LE(current_liveness, instruction->GetLifetimePosition()) << instruction->DebugName();
        current_liveness = instruction->GetLifetimePosition();
      }
    }
  }
}

void RegisterAllocator::BlockRegister(Location location,
                                      size_t start,
                                      size_t end) {
  int reg = location.reg();
  DCHECK(location.IsRegister() || location.IsFpuRegister());
  LiveInterval* interval = location.IsRegister()
      ? physical_core_register_intervals_.Get(reg)
      : physical_fp_register_intervals_.Get(reg);
  Primitive::Type type = location.IsRegister()
      ? Primitive::kPrimInt
      : Primitive::kPrimFloat;
  if (interval == nullptr) {
    interval = LiveInterval::MakeFixedInterval(allocator_, reg, type);
    if (location.IsRegister()) {
      physical_core_register_intervals_.Put(reg, interval);
    } else {
      physical_fp_register_intervals_.Put(reg, interval);
    }
  }
  DCHECK(interval->GetRegister() == reg);
  interval->AddRange(start, end);
}

void RegisterAllocator::AllocateRegistersInternal() {
  // Iterate post-order, to ensure the list is sorted, and the last added interval
  // is the one with the lowest start position.
  for (HLinearPostOrderIterator it(*codegen_->GetGraph()); !it.Done(); it.Advance()) {
    HBasicBlock* block = it.Current();
    for (HBackwardInstructionIterator back_it(block->GetInstructions()); !back_it.Done();
         back_it.Advance()) {
      ProcessInstruction(back_it.Current());
    }
    for (HInstructionIterator inst_it(block->GetPhis()); !inst_it.Done(); inst_it.Advance()) {
      ProcessInstruction(inst_it.Current());
    }
  }

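  // First pass: allocate core registers. Fixed physical-register intervals are
  // seeded into `inactive_` so linear scan sees them as already assigned.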
  number_of_registers_ = codegen_->GetNumberOfCoreRegisters();
  registers_array_ = allocator_->AllocArray<size_t>(number_of_registers_);
  processing_core_registers_ = true;
  unhandled_ = &unhandled_core_intervals_;
  for (size_t i = 0, e = physical_core_register_intervals_.Size(); i < e; ++i) {
    LiveInterval* fixed = physical_core_register_intervals_.Get(i);
    if (fixed != nullptr) {
      // Fixed interval is added to inactive_ instead of unhandled_.
      // It's also the only type of inactive interval whose start position
      // can be after the current interval during linear scan.
      // Fixed interval is never split and never moves to unhandled_.
      inactive_.Add(fixed);
    }
  }
  LinearScan();

  inactive_.Reset();
  active_.Reset();
  handled_.Reset();

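  // Second pass: allocate floating-point registers, re-using the same worklists.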
  number_of_registers_ = codegen_->GetNumberOfFloatingPointRegisters();
  registers_array_ = allocator_->AllocArray<size_t>(number_of_registers_);
  processing_core_registers_ = false;
  unhandled_ = &unhandled_fp_intervals_;
  for (size_t i = 0, e = physical_fp_register_intervals_.Size(); i < e; ++i) {
    LiveInterval* fixed = physical_fp_register_intervals_.Get(i);
    if (fixed != nullptr) {
      // Fixed interval is added to inactive_ instead of unhandled_.
      // It's also the only type of inactive interval whose start position
      // can be after the current interval during linear scan.
      // Fixed interval is never split and never moves to unhandled_.
      inactive_.Add(fixed);
    }
  }
  LinearScan();
}

void RegisterAllocator::ProcessInstruction(HInstruction* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  size_t position = instruction->GetLifetimePosition();

  if (locations == nullptr) return;

  // Create synthesized intervals for temporaries.
  for (size_t i = 0; i < locations->GetTempCount(); ++i) {
    Location temp = locations->GetTemp(i);
    if (temp.IsRegister() || temp.IsFpuRegister()) {
      BlockRegister(temp, position, position + 1);
    } else {
      DCHECK(temp.IsUnallocated());
      switch (temp.GetPolicy()) {
        case Location::kRequiresRegister: {
          LiveInterval* interval =
              LiveInterval::MakeTempInterval(allocator_, Primitive::kPrimInt);
          temp_intervals_.Add(interval);
          interval->AddTempUse(instruction, i);
          unhandled_core_intervals_.Add(interval);
          break;
        }

        case Location::kRequiresFpuRegister: {
          LiveInterval* interval =
              LiveInterval::MakeTempInterval(allocator_, Primitive::kPrimDouble);
          temp_intervals_.Add(interval);
          interval->AddTempUse(instruction, i);
          if (codegen_->NeedsTwoRegisters(Primitive::kPrimDouble)) {
            interval->AddHighInterval(/* is_temp */ true);
            LiveInterval* high = interval->GetHighInterval();
            temp_intervals_.Add(high);
            unhandled_fp_intervals_.Add(high);
          }
          unhandled_fp_intervals_.Add(interval);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected policy for temporary location "
                     << temp.GetPolicy();
      }
    }
  }

  bool core_register = (instruction->GetType() != Primitive::kPrimDouble)
      && (instruction->GetType() != Primitive::kPrimFloat);

  if (locations->CanCall()) {
    if (codegen_->IsLeafMethod()) {
      // TODO: We do this here because we do not want the suspend check to artificially
      // create live registers. We should find another place, but this is currently the
      // simplest.
      DCHECK(instruction->IsSuspendCheckEntry());
      instruction->GetBlock()->RemoveInstruction(instruction);
      return;
    }
    safepoints_.Add(instruction);
    if (locations->OnlyCallsOnSlowPath()) {
      // We add a synthesized range at this position to record the live registers
      // at this position. Ideally, we could just update the safepoints when locations
      // are updated, but we currently need to know the full stack size before updating
      // locations (because of parameters and the fact that we don't have a frame pointer).
      // And knowing the full stack size requires knowing the maximum number of live
      // registers at calls in slow paths.
      // By adding the following interval in the algorithm, we can compute this
      // maximum before updating locations.
      LiveInterval* interval = LiveInterval::MakeSlowPathInterval(allocator_, instruction);
      interval->AddRange(position, position + 1);
      AddSorted(&unhandled_core_intervals_, interval);
      AddSorted(&unhandled_fp_intervals_, interval);
    }
  }

  if (locations->WillCall()) {
    // Block all caller-save registers; callee-save registers are preserved across the call.
    for (size_t i = 0; i < codegen_->GetNumberOfCoreRegisters(); ++i) {
      if (!codegen_->IsCoreCalleeSaveRegister(i)) {
        BlockRegister(Location::RegisterLocation(i),
                      position,
                      position + 1);
      }
    }
    for (size_t i = 0; i < codegen_->GetNumberOfFloatingPointRegisters(); ++i) {
      if (!codegen_->IsFloatingPointCalleeSaveRegister(i)) {
        BlockRegister(Location::FpuRegisterLocation(i),
                      position,
                      position + 1);
      }
    }
  }

  for (size_t i = 0; i < instruction->InputCount(); ++i) {
    Location input = locations->InAt(i);
    if (input.IsRegister() || input.IsFpuRegister()) {
      BlockRegister(input, position, position + 1);
    } else if (input.IsPair()) {
      BlockRegister(input.ToLow(), position, position + 1);
      BlockRegister(input.ToHigh(), position, position + 1);
    }
  }

  LiveInterval* current = instruction->GetLiveInterval();
  if (current == nullptr) return;

  GrowableArray<LiveInterval*>& unhandled = core_register
      ? unhandled_core_intervals_
      : unhandled_fp_intervals_;

  DCHECK(unhandled.IsEmpty() || current->StartsBeforeOrAt(unhandled.Peek()));

  if (codegen_->NeedsTwoRegisters(current->GetType())) {
    current->AddHighInterval();
  }

  for (size_t safepoint_index = safepoints_.Size(); safepoint_index > 0; --safepoint_index) {
    HInstruction* safepoint = safepoints_.Get(safepoint_index - 1);
    size_t safepoint_position = safepoint->GetLifetimePosition();

    // Test that safepoints are ordered in the optimal way.
    DCHECK(safepoint_index == safepoints_.Size()
           || safepoints_.Get(safepoint_index)->GetLifetimePosition() < safepoint_position);

    if (safepoint_position == current->GetStart()) {
      // The safepoint is for this instruction, so the location of the instruction
      // does not need to be saved.
      DCHECK_EQ(safepoint_index, safepoints_.Size());
      DCHECK_EQ(safepoint, instruction);
      continue;
    } else if (current->IsDeadAt(safepoint_position)) {
      break;
    } else if (!current->Covers(safepoint_position)) {
      // Hole in the interval.
      continue;
    }
    current->AddSafepoint(safepoint);
  }
  current->ResetSearchCache();

  // Some instructions define their output in a fixed register or stack slot. We need
  // to ensure we know these locations before doing register allocation. For a
  // given register, we create an interval that covers these locations. The register
  // will be unavailable at these locations when trying to allocate one for an
  // interval.
  //
  // The backwards walking ensures the ranges are ordered on increasing start positions.
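  // (Method parameters are a typical case: the calling convention pins each of
  // them to a specific register or stack slot before allocation starts.)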
  Location output = locations->Out();
  if (output.IsUnallocated() && output.GetPolicy() == Location::kSameAsFirstInput) {
    Location first = locations->InAt(0);
    if (first.IsRegister() || first.IsFpuRegister()) {
      current->SetFrom(position + 1);
      current->SetRegister(first.reg());
    } else if (first.IsPair()) {
      current->SetFrom(position + 1);
      current->SetRegister(first.low());
      LiveInterval* high = current->GetHighInterval();
      high->SetRegister(first.high());
      high->SetFrom(position + 1);
    }
  } else if (output.IsRegister() || output.IsFpuRegister()) {
    // Shift the interval's start by one to account for the blocked register.
    current->SetFrom(position + 1);
    current->SetRegister(output.reg());
    BlockRegister(output, position, position + 1);
  } else if (output.IsPair()) {
    current->SetFrom(position + 1);
    current->SetRegister(output.low());
    LiveInterval* high = current->GetHighInterval();
    high->SetRegister(output.high());
    high->SetFrom(position + 1);
    BlockRegister(output.ToLow(), position, position + 1);
    BlockRegister(output.ToHigh(), position, position + 1);
  } else if (output.IsStackSlot() || output.IsDoubleStackSlot()) {
    current->SetSpillSlot(output.GetStackIndex());
  } else {
    DCHECK(output.IsUnallocated() || output.IsConstant());
  }

  // If needed, add interval to the list of unhandled intervals.
  if (current->HasSpillSlot() || instruction->IsConstant()) {
    // Split just before the first register use.
    size_t first_register_use = current->FirstRegisterUse();
    if (first_register_use != kNoLifetime) {
      LiveInterval* split = SplitBetween(current, current->GetStart(), first_register_use - 1);
      // Don't add directly to `unhandled`: it needs to stay sorted, and the start
      // of this new interval might be after intervals already in the list.
      AddSorted(&unhandled, split);
    } else {
      // Nothing to do, we won't allocate a register for this value.
    }
  } else {
    // Don't add directly to `unhandled`: temp or safepoint intervals
    // for this instruction may have been added, and those can be
    // processed first.
    AddSorted(&unhandled, current);
  }
}

class AllRangesIterator : public ValueObject {
 public:
  explicit AllRangesIterator(LiveInterval* interval)
      : current_interval_(interval),
        current_range_(interval->GetFirstRange()) {}

  bool Done() const { return current_interval_ == nullptr; }
  LiveRange* CurrentRange() const { return current_range_; }
  LiveInterval* CurrentInterval() const { return current_interval_; }

  void Advance() {
    current_range_ = current_range_->GetNext();
    if (current_range_ == nullptr) {
      current_interval_ = current_interval_->GetNextSibling();
      if (current_interval_ != nullptr) {
        current_range_ = current_interval_->GetFirstRange();
      }
    }
  }

 private:
  LiveInterval* current_interval_;
  LiveRange* current_range_;

  DISALLOW_COPY_AND_ASSIGN(AllRangesIterator);
};

bool RegisterAllocator::ValidateInternal(bool log_fatal_on_failure) const {
  // To simplify unit testing, we eagerly create the array of intervals, and
  // call the helper method.
  GrowableArray<LiveInterval*> intervals(allocator_, 0);
  for (size_t i = 0; i < liveness_.GetNumberOfSsaValues(); ++i) {
    HInstruction* instruction = liveness_.GetInstructionFromSsaIndex(i);
    if (ShouldProcess(processing_core_registers_, instruction->GetLiveInterval())) {
      intervals.Add(instruction->GetLiveInterval());
    }
  }

  if (processing_core_registers_) {
    for (size_t i = 0, e = physical_core_register_intervals_.Size(); i < e; ++i) {
      LiveInterval* fixed = physical_core_register_intervals_.Get(i);
      if (fixed != nullptr) {
        intervals.Add(fixed);
      }
    }
  } else {
    for (size_t i = 0, e = physical_fp_register_intervals_.Size(); i < e; ++i) {
      LiveInterval* fixed = physical_fp_register_intervals_.Get(i);
      if (fixed != nullptr) {
        intervals.Add(fixed);
      }
    }
  }

  for (size_t i = 0, e = temp_intervals_.Size(); i < e; ++i) {
    LiveInterval* temp = temp_intervals_.Get(i);
    if (ShouldProcess(processing_core_registers_, temp)) {
      intervals.Add(temp);
    }
  }

  return ValidateIntervals(intervals, GetNumberOfSpillSlots(), reserved_out_slots_, *codegen_,
                           allocator_, processing_core_registers_, log_fatal_on_failure);
}

bool RegisterAllocator::ValidateIntervals(const GrowableArray<LiveInterval*>& intervals,
                                          size_t number_of_spill_slots,
                                          size_t number_of_out_slots,
                                          const CodeGenerator& codegen,
                                          ArenaAllocator* allocator,
                                          bool processing_core_registers,
                                          bool log_fatal_on_failure) {
  size_t number_of_registers = processing_core_registers
      ? codegen.GetNumberOfCoreRegisters()
      : codegen.GetNumberOfFloatingPointRegisters();
  GrowableArray<ArenaBitVector*> liveness_of_values(
      allocator, number_of_registers + number_of_spill_slots);

  // Allocate a bit vector per register. A live interval that has a register
  // allocated will populate the associated bit vector based on its live ranges.
  for (size_t i = 0; i < number_of_registers + number_of_spill_slots; ++i) {
    liveness_of_values.Add(new (allocator) ArenaBitVector(allocator, 0, true));
  }

  for (size_t i = 0, e = intervals.Size(); i < e; ++i) {
    for (AllRangesIterator it(intervals.Get(i)); !it.Done(); it.Advance()) {
      LiveInterval* current = it.CurrentInterval();
      HInstruction* defined_by = current->GetParent()->GetDefinedBy();
      if (current->GetParent()->HasSpillSlot()
           // Parameters have their own stack slot.
           && !(defined_by != nullptr && defined_by->IsParameterValue())) {
        BitVector* liveness_of_spill_slot = liveness_of_values.Get(number_of_registers
            + current->GetParent()->GetSpillSlot() / kVRegSize
            - number_of_out_slots);
        for (size_t j = it.CurrentRange()->GetStart(); j < it.CurrentRange()->GetEnd(); ++j) {
          if (liveness_of_spill_slot->IsBitSet(j)) {
            if (log_fatal_on_failure) {
              std::ostringstream message;
              message << "Spill slot conflict at " << j;
              LOG(FATAL) << message.str();
            } else {
              return false;
            }
          } else {
            liveness_of_spill_slot->SetBit(j);
          }
        }
      }

      if (current->HasRegister()) {
        BitVector* liveness_of_register = liveness_of_values.Get(current->GetRegister());
        for (size_t j = it.CurrentRange()->GetStart(); j < it.CurrentRange()->GetEnd(); ++j) {
          if (liveness_of_register->IsBitSet(j)) {
            if (current->IsUsingInputRegister() && current->CanUseInputRegister()) {
              continue;
            }
            if (log_fatal_on_failure) {
              std::ostringstream message;
              message << "Register conflict at " << j << " ";
              if (defined_by != nullptr) {
                message << "(" << defined_by->DebugName() << ")";
              }
              message << "for ";
              if (processing_core_registers) {
                codegen.DumpCoreRegister(message, current->GetRegister());
              } else {
                codegen.DumpFloatingPointRegister(message, current->GetRegister());
              }
              LOG(FATAL) << message.str();
            } else {
              return false;
            }
          } else {
            liveness_of_register->SetBit(j);
          }
        }
      }
    }
  }
  return true;
}

void RegisterAllocator::DumpInterval(std::ostream& stream, LiveInterval* interval) const {
  interval->Dump(stream);
  stream << ": ";
  if (interval->HasRegister()) {
    if (interval->IsFloatingPoint()) {
      codegen_->DumpFloatingPointRegister(stream, interval->GetRegister());
    } else {
      codegen_->DumpCoreRegister(stream, interval->GetRegister());
    }
  } else {
    stream << "spilled";
  }
  stream << std::endl;
}

void RegisterAllocator::DumpAllIntervals(std::ostream& stream) const {
  stream << "inactive: " << std::endl;
  for (size_t i = 0; i < inactive_.Size(); i++) {
    DumpInterval(stream, inactive_.Get(i));
  }
  stream << "active: " << std::endl;
  for (size_t i = 0; i < active_.Size(); i++) {
    DumpInterval(stream, active_.Get(i));
  }
  stream << "unhandled: " << std::endl;
  auto unhandled = (unhandled_ != nullptr) ?
      unhandled_ : &unhandled_core_intervals_;
  for (size_t i = 0; i < unhandled->Size(); i++) {
    DumpInterval(stream, unhandled->Get(i));
  }
  stream << "handled: " << std::endl;
  for (size_t i = 0; i < handled_.Size(); i++) {
    DumpInterval(stream, handled_.Get(i));
  }
}

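// A quick reminder of the worklists driving the scan below:
//  - unhandled_: intervals not yet assigned, kept sorted so the one with the
//                lowest start position is popped first;
//  - active_:    intervals holding a register and covering the current position;
//  - inactive_:  intervals holding a register but in a lifetime hole at the
//                current position (this also hosts the fixed physical intervals);
//  - handled_:   intervals that are dead or fully processed at this point.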
// By the book implementation of a linear scan register allocator.
void RegisterAllocator::LinearScan() {
  while (!unhandled_->IsEmpty()) {
    // (1) Remove interval with the lowest start position from unhandled.
    LiveInterval* current = unhandled_->Pop();
    DCHECK(!current->IsFixed() && !current->HasSpillSlot());
    DCHECK(unhandled_->IsEmpty() || unhandled_->Peek()->GetStart() >= current->GetStart());
    DCHECK(!current->IsLowInterval() || unhandled_->Peek()->IsHighInterval());

    size_t position = current->GetStart();

    // Remember the inactive_ size here since the ones moved to inactive_ from
    // active_ below shouldn't need to be re-checked.
    size_t inactive_intervals_to_handle = inactive_.Size();

    // (2) Remove currently active intervals that are dead at this position.
    // Move active intervals that have a lifetime hole at this position
    // to inactive.
    for (size_t i = 0; i < active_.Size(); ++i) {
      LiveInterval* interval = active_.Get(i);
      if (interval->IsDeadAt(position)) {
        active_.Delete(interval);
        --i;
        handled_.Add(interval);
      } else if (!interval->Covers(position)) {
        active_.Delete(interval);
        --i;
        inactive_.Add(interval);
      }
    }

    // (3) Remove currently inactive intervals that are dead at this position.
    // Move inactive intervals that cover this position to active.
    for (size_t i = 0; i < inactive_intervals_to_handle; ++i) {
      LiveInterval* interval = inactive_.Get(i);
      DCHECK(interval->GetStart() < position || interval->IsFixed());
      if (interval->IsDeadAt(position)) {
        inactive_.Delete(interval);
        --i;
        --inactive_intervals_to_handle;
        handled_.Add(interval);
      } else if (interval->Covers(position)) {
        inactive_.Delete(interval);
        --i;
        --inactive_intervals_to_handle;
        active_.Add(interval);
      }
    }

    if (current->IsSlowPathSafepoint()) {
      // Synthesized interval to record the maximum number of live registers
      // at safepoints. No need to allocate a register for it.
      if (processing_core_registers_) {
        maximum_number_of_live_core_registers_ =
            std::max(maximum_number_of_live_core_registers_, active_.Size());
      } else {
        maximum_number_of_live_fp_registers_ =
            std::max(maximum_number_of_live_fp_registers_, active_.Size());
      }
      DCHECK(unhandled_->IsEmpty() || unhandled_->Peek()->GetStart() > current->GetStart());
      continue;
    }

    if (current->IsHighInterval() && !current->GetLowInterval()->HasRegister()) {
      DCHECK(!current->HasRegister());
      // Allocating the low part was unsuccessful. The split interval for the high part
      // will be handled next (it is in the `unhandled_` list).
      continue;
    }

    // (4) Try to find an available register.
    bool success = TryAllocateFreeReg(current);

    // (5) If no register could be found, we need to spill.
    if (!success) {
      success = AllocateBlockedReg(current);
    }

    // (6) If the interval had a register allocated, add it to the list of active
    // intervals.
    if (success) {
      codegen_->AddAllocatedRegister(processing_core_registers_
          ? Location::RegisterLocation(current->GetRegister())
          : Location::FpuRegisterLocation(current->GetRegister()));
      active_.Add(current);
      if (current->HasHighInterval() && !current->GetHighInterval()->HasRegister()) {
        current->GetHighInterval()->SetRegister(GetHighForLowRegister(current->GetRegister()));
      }
    }
  }
}

static void FreeIfNotCoverAt(LiveInterval* interval, size_t position, size_t* free_until) {
  DCHECK(!interval->IsHighInterval());
  // Note that the same instruction may occur multiple times in the input list,
  // so `free_until` may have changed already.
  // Since `position` is not the current scan position, we need to use CoversSlow.
  if (interval->IsDeadAt(position)) {
    // Set the register to be free. Note that inactive intervals might later
    // update this.
    free_until[interval->GetRegister()] = kMaxLifetimePosition;
    if (interval->HasHighInterval()) {
      DCHECK(interval->GetHighInterval()->IsDeadAt(position));
      free_until[interval->GetHighInterval()->GetRegister()] = kMaxLifetimePosition;
    }
  } else if (!interval->CoversSlow(position)) {
    // The interval becomes inactive at `defined_by`. We make its register
    // available only until the next use strictly after `defined_by`.
    free_until[interval->GetRegister()] = interval->FirstUseAfter(position);
    if (interval->HasHighInterval()) {
      DCHECK(!interval->GetHighInterval()->CoversSlow(position));
      free_until[interval->GetHighInterval()->GetRegister()] = free_until[interval->GetRegister()];
    }
  }
}

// Find a free register. If multiple are found, pick the register that
// is free the longest.
bool RegisterAllocator::TryAllocateFreeReg(LiveInterval* current) {
  size_t* free_until = registers_array_;

  // First set all registers to be free.
  for (size_t i = 0; i < number_of_registers_; ++i) {
    free_until[i] = kMaxLifetimePosition;
  }

  // For each active interval, set its register to not free.
  for (size_t i = 0, e = active_.Size(); i < e; ++i) {
    LiveInterval* interval = active_.Get(i);
    DCHECK(interval->HasRegister());
    free_until[interval->GetRegister()] = 0;
  }

  // An interval that starts at an instruction (that is, it is not split) may
  // reuse the registers used by the inputs of that instruction, based on the
  // location summary.
  HInstruction* defined_by = current->GetDefinedBy();
  if (defined_by != nullptr && !current->IsSplit()) {
    LocationSummary* locations = defined_by->GetLocations();
    if (!locations->OutputCanOverlapWithInputs() && locations->Out().IsUnallocated()) {
      for (HInputIterator it(defined_by); !it.Done(); it.Advance()) {
        // Take the last interval of the input. It is the location of that interval
        // that will be used at `defined_by`.
        LiveInterval* interval = it.Current()->GetLiveInterval()->GetLastSibling();
        // Note that the interval may not have been processed yet.
        // TODO: Handle non-split intervals last in the work list.
        if (interval->HasRegister() && interval->SameRegisterKind(*current)) {
          // The input must be live until the end of `defined_by`, to comply with
          // the linear scan algorithm. So we use `defined_by`'s end lifetime
          // position to check whether the input is dead or is inactive after
          // `defined_by`.
          DCHECK(interval->CoversSlow(defined_by->GetLifetimePosition()));
          size_t position = defined_by->GetLifetimePosition() + 1;
          FreeIfNotCoverAt(interval, position, free_until);
        }
      }
    }
  }

  // For each inactive interval, set its register to be free until
  // the next intersection with `current`.
  for (size_t i = 0, e = inactive_.Size(); i < e; ++i) {
    LiveInterval* inactive = inactive_.Get(i);
    // Temp/Slow-path-safepoint interval has no holes.
    DCHECK(!inactive->IsTemp() && !inactive->IsSlowPathSafepoint());
    if (!current->IsSplit() && !inactive->IsFixed()) {
      // Neither current nor inactive are fixed.
      // Thanks to SSA, a non-split interval starting in a hole of an
      // inactive interval should never intersect with that inactive interval.
      // Only if it's not fixed though, because fixed intervals don't come from SSA.
      DCHECK_EQ(inactive->FirstIntersectionWith(current), kNoLifetime);
      continue;
    }

    DCHECK(inactive->HasRegister());
    if (free_until[inactive->GetRegister()] == 0) {
      // Already used by some active interval. No need to intersect.
      continue;
    }
    size_t next_intersection = inactive->FirstIntersectionWith(current);
    if (next_intersection != kNoLifetime) {
      free_until[inactive->GetRegister()] =
          std::min(free_until[inactive->GetRegister()], next_intersection);
    }
  }

  int reg = kNoRegister;
  if (current->HasRegister()) {
    // Some instructions have a fixed register output.
    reg = current->GetRegister();
    if (free_until[reg] == 0) {
      DCHECK(current->IsHighInterval());
      // AllocateBlockedReg will spill the holder of the register.
      return false;
    }
  } else {
    DCHECK(!current->IsHighInterval());
    int hint = current->FindFirstRegisterHint(free_until, liveness_);
    if (hint != kNoRegister) {
      DCHECK(!IsBlocked(hint));
      reg = hint;
    } else if (current->IsLowInterval()) {
      reg = FindAvailableRegisterPair(free_until, current->GetStart());
    } else {
      reg = FindAvailableRegister(free_until);
    }
  }

  DCHECK_NE(reg, kNoRegister);
  // If we could not find a register, we need to spill.
  if (free_until[reg] == 0) {
    return false;
  }

  if (current->IsLowInterval()) {
    // If the high register of this interval is not available, we need to spill.
    int high_reg = current->GetHighInterval()->GetRegister();
    if (high_reg == kNoRegister) {
      high_reg = GetHighForLowRegister(reg);
    }
    if (free_until[high_reg] == 0) {
      return false;
    }
  }

  current->SetRegister(reg);
  if (!current->IsDeadAt(free_until[reg])) {
    // If the register is only available for a subset of live ranges
    // covered by `current`, split `current` at the position where
    // the register is not available anymore.
    LiveInterval* split = Split(current, free_until[reg]);
    DCHECK(split != nullptr);
    AddSorted(unhandled_, split);
  }
  return true;
}

bool RegisterAllocator::IsBlocked(int reg) const {
  return processing_core_registers_
      ? blocked_core_registers_[reg]
      : blocked_fp_registers_[reg];
}

int RegisterAllocator::FindAvailableRegisterPair(size_t* next_use, size_t starting_at) const {
  int reg = kNoRegister;
  // Pick the register pair whose next use is furthest away.
  for (size_t i = 0; i < number_of_registers_; ++i) {
    if (IsBlocked(i)) continue;
    if (!IsLowRegister(i)) continue;
    int high_register = GetHighForLowRegister(i);
    if (IsBlocked(high_register)) continue;
    int existing_high_register = GetHighForLowRegister(reg);
    if ((reg == kNoRegister) || (next_use[i] >= next_use[reg]
        && next_use[high_register] >= next_use[existing_high_register])) {
      reg = i;
      if (next_use[i] == kMaxLifetimePosition
          && next_use[high_register] == kMaxLifetimePosition) {
        break;
      }
    } else if (next_use[reg] <= starting_at || next_use[existing_high_register] <= starting_at) {
      // If one of the current registers is known to be unavailable, just unconditionally
      // try a new one.
      reg = i;
    }
  }
  return reg;
}

int RegisterAllocator::FindAvailableRegister(size_t* next_use) const {
  int reg = kNoRegister;
  // Pick the register whose next use is furthest away.
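  // For instance, with three unblocked registers whose next uses are
  // {r0: 12, r1: 40, r2: kMaxLifetimePosition}, r2 is chosen: it stays free
  // the longest (the names r0-r2 are only illustrative).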
  for (size_t i = 0; i < number_of_registers_; ++i) {
    if (IsBlocked(i)) continue;
    if (reg == kNoRegister || next_use[i] > next_use[reg]) {
      reg = i;
      if (next_use[i] == kMaxLifetimePosition) break;
    }
  }
  return reg;
}

bool RegisterAllocator::TrySplitNonPairOrUnalignedPairIntervalAt(size_t position,
                                                                 size_t first_register_use,
                                                                 size_t* next_use) {
  for (size_t i = 0, e = active_.Size(); i < e; ++i) {
    LiveInterval* active = active_.Get(i);
    DCHECK(active->HasRegister());
    if (active->IsFixed()) continue;
    if (active->IsHighInterval()) continue;
    if (first_register_use > next_use[active->GetRegister()]) continue;

    // Split the first interval found.
    if (!active->IsLowInterval() || IsLowOfUnalignedPairInterval(active)) {
      LiveInterval* split = Split(active, position);
      active_.DeleteAt(i);
      if (split != active) {
        handled_.Add(active);
      }
      AddSorted(unhandled_, split);
      return true;
    }
  }
  return false;
}

bool RegisterAllocator::PotentiallyRemoveOtherHalf(LiveInterval* interval,
                                                   GrowableArray<LiveInterval*>* intervals,
                                                   size_t index) {
  if (interval->IsLowInterval()) {
    DCHECK_EQ(intervals->Get(index), interval->GetHighInterval());
    intervals->DeleteAt(index);
    return true;
  } else if (interval->IsHighInterval()) {
    DCHECK_GT(index, 0u);
    DCHECK_EQ(intervals->Get(index - 1), interval->GetLowInterval());
    intervals->DeleteAt(index - 1);
    return true;
  } else {
    return false;
  }
}

// Find the register that is used the last, and spill the interval
// that holds it. If the first use of `current` is after that register's
// next use, we spill `current` instead.
bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
  size_t first_register_use = current->FirstRegisterUse();
  if (first_register_use == kNoLifetime) {
    AllocateSpillSlotFor(current);
    return false;
  }

  // We use the first use to compare with other intervals. If this interval
  // is used after any active intervals, we will spill this interval.
  size_t first_use = current->FirstUseAfter(current->GetStart());

  // First set all registers as not being used.
  size_t* next_use = registers_array_;
  for (size_t i = 0; i < number_of_registers_; ++i) {
    next_use[i] = kMaxLifetimePosition;
  }

  // For each active interval, find the next use of its register after the
  // start of current.
  for (size_t i = 0, e = active_.Size(); i < e; ++i) {
    LiveInterval* active = active_.Get(i);
    DCHECK(active->HasRegister());
    if (active->IsFixed()) {
      next_use[active->GetRegister()] = current->GetStart();
    } else {
      size_t use = active->FirstUseAfter(current->GetStart());
      if (use != kNoLifetime) {
        next_use[active->GetRegister()] = use;
      }
    }
  }

  // For each inactive interval, find the next use of its register after the
  // start of current.
  for (size_t i = 0, e = inactive_.Size(); i < e; ++i) {
    LiveInterval* inactive = inactive_.Get(i);
    // Temp/Slow-path-safepoint interval has no holes.
    DCHECK(!inactive->IsTemp() && !inactive->IsSlowPathSafepoint());
    if (!current->IsSplit() && !inactive->IsFixed()) {
      // Neither current nor inactive are fixed.
      // Thanks to SSA, a non-split interval starting in a hole of an
      // inactive interval should never intersect with that inactive interval.
      // Only if it's not fixed though, because fixed intervals don't come from SSA.
      DCHECK_EQ(inactive->FirstIntersectionWith(current), kNoLifetime);
      continue;
    }
    DCHECK(inactive->HasRegister());
    size_t next_intersection = inactive->FirstIntersectionWith(current);
    if (next_intersection != kNoLifetime) {
      if (inactive->IsFixed()) {
        next_use[inactive->GetRegister()] =
            std::min(next_intersection, next_use[inactive->GetRegister()]);
      } else {
        size_t use = inactive->FirstUseAfter(current->GetStart());
        if (use != kNoLifetime) {
          next_use[inactive->GetRegister()] = std::min(use, next_use[inactive->GetRegister()]);
        }
      }
    }
  }

  int reg = kNoRegister;
  bool should_spill = false;
  if (current->HasRegister()) {
    DCHECK(current->IsHighInterval());
    reg = current->GetRegister();
    // When allocating the low part, we made sure the high register was available.
    DCHECK_LT(first_use, next_use[reg]);
  } else if (current->IsLowInterval()) {
    reg = FindAvailableRegisterPair(next_use, first_register_use);
    // We should spill if both registers are not available.
    should_spill = (first_use >= next_use[reg])
        || (first_use >= next_use[GetHighForLowRegister(reg)]);
  } else {
    DCHECK(!current->IsHighInterval());
    reg = FindAvailableRegister(next_use);
    should_spill = (first_use >= next_use[reg]);
  }

  DCHECK_NE(reg, kNoRegister);
  if (should_spill) {
    DCHECK(!current->IsHighInterval());
    bool is_allocation_at_use_site = (current->GetStart() >= (first_register_use - 1));
    if (current->IsLowInterval()
        && is_allocation_at_use_site
        && TrySplitNonPairOrUnalignedPairIntervalAt(current->GetStart(),
                                                    first_register_use,
                                                    next_use)) {
      // If we're allocating a register for `current` because the instruction at
      // that position requires it, but we think we should spill, then there are
      // non-pair intervals or unaligned pair intervals blocking the allocation.
      // We split the first interval found, and put ourselves first in the
      // `unhandled_` list.
      LiveInterval* existing = unhandled_->Peek();
      DCHECK(existing->IsHighInterval());
      DCHECK_EQ(existing->GetLowInterval(), current);
      unhandled_->Add(current);
    } else {
      // If the first use of that instruction is after the last use of the found
      // register, we split this interval just before its first register use.
      AllocateSpillSlotFor(current);
      LiveInterval* split = SplitBetween(current, current->GetStart(), first_register_use - 1);
      if (current == split) {
        DumpInterval(std::cerr, current);
        DumpAllIntervals(std::cerr);
        // This situation has the potential to infinite loop, so we make it a non-debug CHECK.
        HInstruction* at = liveness_.GetInstructionFromPosition(first_register_use / 2);
        CHECK(false) << "There are not enough registers available for "
                     << split->GetParent()->GetDefinedBy()->DebugName() << " "
                     << split->GetParent()->GetDefinedBy()->GetId()
                     << " at " << first_register_use - 1 << " "
                     << (at == nullptr ? "" : at->DebugName());
      }
      AddSorted(unhandled_, split);
    }
    return false;
  } else {
    // Use this register and spill the active and inactive intervals that
    // hold that register.
    current->SetRegister(reg);

    for (size_t i = 0, e = active_.Size(); i < e; ++i) {
      LiveInterval* active = active_.Get(i);
      if (active->GetRegister() == reg) {
        DCHECK(!active->IsFixed());
        LiveInterval* split = Split(active, current->GetStart());
        if (split != active) {
          handled_.Add(active);
        }
        active_.DeleteAt(i);
        PotentiallyRemoveOtherHalf(active, &active_, i);
        AddSorted(unhandled_, split);
        break;
      }
    }

    for (size_t i = 0; i < inactive_.Size(); ++i) {
      LiveInterval* inactive = inactive_.Get(i);
      if (inactive->GetRegister() == reg) {
        if (!current->IsSplit() && !inactive->IsFixed()) {
          // Neither current nor inactive are fixed.
          // Thanks to SSA, a non-split interval starting in a hole of an
          // inactive interval should never intersect with that inactive interval.
          // Only if it's not fixed though, because fixed intervals don't come from SSA.
          DCHECK_EQ(inactive->FirstIntersectionWith(current), kNoLifetime);
          continue;
        }
        size_t next_intersection = inactive->FirstIntersectionWith(current);
        if (next_intersection != kNoLifetime) {
          if (inactive->IsFixed()) {
            LiveInterval* split = Split(current, next_intersection);
            DCHECK_NE(split, current);
            AddSorted(unhandled_, split);
          } else {
            // Split at the start of `current`, which will lead to splitting
            // at the end of the lifetime hole of `inactive`.
            LiveInterval* split = Split(inactive, current->GetStart());
            // If it's inactive, it must start before the current interval.
            DCHECK_NE(split, inactive);
            inactive_.DeleteAt(i);
            if (PotentiallyRemoveOtherHalf(inactive, &inactive_, i) && inactive->IsHighInterval()) {
              // We have removed an entry prior to `inactive`. So we need to decrement.
              --i;
            }
            // Decrement because we have removed `inactive` from the list.
            --i;
            handled_.Add(inactive);
            AddSorted(unhandled_, split);
          }
        }
      }
    }

    return true;
  }
}

Nicolas Geoffray39468442014-09-02 15:17:15 +01001076void RegisterAllocator::AddSorted(GrowableArray<LiveInterval*>* array, LiveInterval* interval) {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01001077 DCHECK(!interval->IsFixed() && !interval->HasSpillSlot());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001078 size_t insert_at = 0;
Nicolas Geoffray39468442014-09-02 15:17:15 +01001079 for (size_t i = array->Size(); i > 0; --i) {
1080 LiveInterval* current = array->Get(i - 1);
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001081 // High intervals must be processed right after their low equivalent.
1082 if (current->StartsAfter(interval) && !current->IsHighInterval()) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001083 insert_at = i;
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001084 break;
Nicolas Geoffrayacd03392014-11-26 15:46:52 +00001085 } else if ((current->GetStart() == interval->GetStart()) && current->IsSlowPathSafepoint()) {
1086 // Ensure the slow path interval is the last to be processed at its location: we want the
1087 // interval to know all live registers at this location.
1088 DCHECK(i == 1 || array->Get(i - 2)->StartsAfter(current));
1089 insert_at = i;
1090 break;
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001091 }
1092 }
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001093
Nicolas Geoffray39468442014-09-02 15:17:15 +01001094 array->InsertAt(insert_at, interval);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001095  // Insert the high interval before the low, to ensure the low is processed before the high.
1096 if (interval->HasHighInterval()) {
1097 array->InsertAt(insert_at, interval->GetHighInterval());
1098 } else if (interval->HasLowInterval()) {
1099 array->InsertAt(insert_at + 1, interval->GetLowInterval());
1100 }
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001101}
1102
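// Split `interval` at a position between `from` and `to`, preferring a block
// boundary and hoisting the split out of loops that contain `to` but not `from`,
// so the resulting move can reuse the moves inserted for non-linear control flow.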
Nicolas Geoffray8cbab3c2015-04-23 15:14:36 +01001103LiveInterval* RegisterAllocator::SplitBetween(LiveInterval* interval, size_t from, size_t to) {
Nicolas Geoffrayfbda5f32015-04-29 14:16:00 +01001104 HBasicBlock* block_from = liveness_.GetBlockFromPosition(from / 2);
1105 HBasicBlock* block_to = liveness_.GetBlockFromPosition(to / 2);
Nicolas Geoffray8cbab3c2015-04-23 15:14:36 +01001106 DCHECK(block_from != nullptr);
1107 DCHECK(block_to != nullptr);
1108
1109 // Both locations are in the same block. We split at the given location.
1110 if (block_from == block_to) {
1111 return Split(interval, to);
1112 }
1113
Nicolas Geoffrayfbda5f32015-04-29 14:16:00 +01001114 /*
1115 * Non-linear control flow will force moves at every branch instruction to the new location.
1116 * To avoid having all branches doing the moves, we find the next non-linear position and
1117 * split the interval at this position. Take the following example (block number is the linear
1118 * order position):
1119 *
1120 * B1
1121 * / \
1122 * B2 B3
1123 * \ /
1124 * B4
1125 *
1126 * B2 needs to split an interval, whose next use is in B4. If we were to split at the
1127 * beginning of B4, B3 would need to do a move between B3 and B4 to ensure the interval
 1128   * is now in the correct location. It makes performance worse if the interval is spilled
1129 * and both B2 and B3 need to reload it before entering B4.
1130 *
1131 * By splitting at B3, we give a chance to the register allocator to allocate the
1132 * interval to the same register as in B1, and therefore avoid doing any
1133 * moves in B3.
1134 */
1135 if (block_from->GetDominator() != nullptr) {
1136 const GrowableArray<HBasicBlock*>& dominated = block_from->GetDominator()->GetDominatedBlocks();
1137 for (size_t i = 0; i < dominated.Size(); ++i) {
1138 size_t position = dominated.Get(i)->GetLifetimeStart();
1139 if ((position > from) && (block_to->GetLifetimeStart() > position)) {
1140 // Even if we found a better block, we continue iterating in case
1141 // a dominated block is closer.
1142 // Note that dominated blocks are not sorted in liveness order.
1143 block_to = dominated.Get(i);
1144 DCHECK_NE(block_to, block_from);
1145 }
1146 }
1147 }
1148
Nicolas Geoffray8cbab3c2015-04-23 15:14:36 +01001149 // If `to` is in a loop, find the outermost loop header which does not contain `from`.
1150 for (HLoopInformationOutwardIterator it(*block_to); !it.Done(); it.Advance()) {
1151 HBasicBlock* header = it.Current()->GetHeader();
1152 if (block_from->GetLifetimeStart() >= header->GetLifetimeStart()) {
1153 break;
1154 }
1155 block_to = header;
1156 }
1157
 1158  // Split at the start of the found block, to piggyback on the existing moves
 1159  // due to resolution of non-linear control flow (see `ConnectSplitSiblings`).
1160 return Split(interval, block_to->GetLifetimeStart());
1161}
1162
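// Split `interval` at `position`. If `position` is the interval's start, the
// interval simply loses its register and will be reprocessed later; otherwise a
// new sibling starting at `position` is created. High/low halves of a pair are
// kept in sync in both cases.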
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001163LiveInterval* RegisterAllocator::Split(LiveInterval* interval, size_t position) {
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001164 DCHECK_GE(position, interval->GetStart());
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001165 DCHECK(!interval->IsDeadAt(position));
1166 if (position == interval->GetStart()) {
1167 // Spill slot will be allocated when handling `interval` again.
1168 interval->ClearRegister();
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001169 if (interval->HasHighInterval()) {
1170 interval->GetHighInterval()->ClearRegister();
1171 } else if (interval->HasLowInterval()) {
1172 interval->GetLowInterval()->ClearRegister();
1173 }
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001174 return interval;
1175 } else {
1176 LiveInterval* new_interval = interval->SplitAt(position);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001177 if (interval->HasHighInterval()) {
1178 LiveInterval* high = interval->GetHighInterval()->SplitAt(position);
1179 new_interval->SetHighInterval(high);
1180 high->SetLowInterval(new_interval);
1181 } else if (interval->HasLowInterval()) {
1182 LiveInterval* low = interval->GetLowInterval()->SplitAt(position);
1183 new_interval->SetLowInterval(low);
1184 low->SetHighInterval(new_interval);
1185 }
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001186 return new_interval;
1187 }
1188}
1189
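// Ensure the parent of `interval` has a spill slot of the appropriate kind,
// reusing a slot that is free at the parent's start when possible. Long and
// double intervals take two consecutive slots.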
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01001190void RegisterAllocator::AllocateSpillSlotFor(LiveInterval* interval) {
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001191 if (interval->IsHighInterval()) {
1192 // The low interval will contain the spill slot.
1193 return;
1194 }
1195
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01001196 LiveInterval* parent = interval->GetParent();
1197
1198 // An instruction gets a spill slot for its entire lifetime. If the parent
1199 // of this interval already has a spill slot, there is nothing to do.
1200 if (parent->HasSpillSlot()) {
1201 return;
1202 }
1203
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001204 HInstruction* defined_by = parent->GetDefinedBy();
1205 if (defined_by->IsParameterValue()) {
1206 // Parameters have their own stack slot.
1207 parent->SetSpillSlot(codegen_->GetStackSlotOfParameter(defined_by->AsParameterValue()));
1208 return;
1209 }
1210
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001211 if (defined_by->IsConstant()) {
1212 // Constants don't need a spill slot.
1213 return;
1214 }
1215
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01001216 LiveInterval* last_sibling = interval;
1217 while (last_sibling->GetNextSibling() != nullptr) {
1218 last_sibling = last_sibling->GetNextSibling();
1219 }
1220 size_t end = last_sibling->GetEnd();
1221
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001222 GrowableArray<size_t>* spill_slots = nullptr;
1223 switch (interval->GetType()) {
1224 case Primitive::kPrimDouble:
1225 spill_slots = &double_spill_slots_;
1226 break;
1227 case Primitive::kPrimLong:
1228 spill_slots = &long_spill_slots_;
1229 break;
1230 case Primitive::kPrimFloat:
1231 spill_slots = &float_spill_slots_;
1232 break;
1233 case Primitive::kPrimNot:
1234 case Primitive::kPrimInt:
1235 case Primitive::kPrimChar:
1236 case Primitive::kPrimByte:
1237 case Primitive::kPrimBoolean:
1238 case Primitive::kPrimShort:
1239 spill_slots = &int_spill_slots_;
1240 break;
1241 case Primitive::kPrimVoid:
1242 LOG(FATAL) << "Unexpected type for interval " << interval->GetType();
1243 }
1244
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001245 // Find an available spill slot.
1246 size_t slot = 0;
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001247 for (size_t e = spill_slots->Size(); slot < e; ++slot) {
1248 if (spill_slots->Get(slot) <= parent->GetStart()
1249 && (slot == (e - 1) || spill_slots->Get(slot + 1) <= parent->GetStart())) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001250 break;
1251 }
1252 }
1253
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001254 if (parent->NeedsTwoSpillSlots()) {
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001255 if (slot == spill_slots->Size()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001256 // We need a new spill slot.
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001257 spill_slots->Add(end);
1258 spill_slots->Add(end);
1259 } else if (slot == spill_slots->Size() - 1) {
1260 spill_slots->Put(slot, end);
1261 spill_slots->Add(end);
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001262 } else {
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001263 spill_slots->Put(slot, end);
1264 spill_slots->Put(slot + 1, end);
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01001265 }
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01001266 } else {
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001267 if (slot == spill_slots->Size()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001268 // We need a new spill slot.
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001269 spill_slots->Add(end);
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001270 } else {
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001271 spill_slots->Put(slot, end);
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001272 }
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01001273 }
1274
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001275 // Note that the exact spill slot location will be computed when we resolve,
 1276  // that is, when we know the number of spill slots for each type.
1277 parent->SetSpillSlot(slot);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001278}
1279
Nicolas Geoffray2a877f32014-09-10 10:49:34 +01001280static bool IsValidDestination(Location destination) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001281 return destination.IsRegister()
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001282 || destination.IsRegisterPair()
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001283 || destination.IsFpuRegister()
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001284 || destination.IsFpuRegisterPair()
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001285 || destination.IsStackSlot()
1286 || destination.IsDoubleStackSlot();
Nicolas Geoffray2a877f32014-09-10 10:49:34 +01001287}
1288
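// Add a move to `move`, splitting a long move into two 32-bit moves when the
// code generator requests it. Long constants are kept whole, as the parallel
// move resolver handles them directly.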
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001289void RegisterAllocator::AddMove(HParallelMove* move,
1290 Location source,
1291 Location destination,
1292 HInstruction* instruction,
1293 Primitive::Type type) const {
1294 if (type == Primitive::kPrimLong
1295 && codegen_->ShouldSplitLongMoves()
1296 // The parallel move resolver knows how to deal with long constants.
1297 && !source.IsConstant()) {
Nicolas Geoffray90218252015-04-15 11:56:51 +01001298 move->AddMove(source.ToLow(), destination.ToLow(), Primitive::kPrimInt, instruction);
1299 move->AddMove(source.ToHigh(), destination.ToHigh(), Primitive::kPrimInt, nullptr);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001300 } else {
Nicolas Geoffray90218252015-04-15 11:56:51 +01001301 move->AddMove(source, destination, type, instruction);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001302 }
1303}
1304
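// Add a move materializing `input` at the location expected by `user`, reusing
// the parallel move right before `user` or creating one if there is none.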
1305void RegisterAllocator::AddInputMoveFor(HInstruction* input,
1306 HInstruction* user,
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001307 Location source,
1308 Location destination) const {
1309 if (source.Equals(destination)) return;
1310
Roland Levillain476df552014-10-09 17:51:36 +01001311 DCHECK(!user->IsPhi());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001312
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001313 HInstruction* previous = user->GetPrevious();
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001314 HParallelMove* move = nullptr;
1315 if (previous == nullptr
Roland Levillain476df552014-10-09 17:51:36 +01001316 || !previous->IsParallelMove()
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01001317 || previous->GetLifetimePosition() < user->GetLifetimePosition()) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001318 move = new (allocator_) HParallelMove(allocator_);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01001319 move->SetLifetimePosition(user->GetLifetimePosition());
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001320 user->GetBlock()->InsertInstructionBefore(move, user);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001321 } else {
1322 move = previous->AsParallelMove();
1323 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01001324 DCHECK_EQ(move->GetLifetimePosition(), user->GetLifetimePosition());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001325 AddMove(move, source, destination, nullptr, input->GetType());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001326}
1327
Nicolas Geoffray46fbaab2014-11-26 18:30:23 +00001328static bool IsInstructionStart(size_t position) {
1329 return (position & 1) == 0;
1330}
1331
1332static bool IsInstructionEnd(size_t position) {
1333 return (position & 1) == 1;
1334}
1335
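// Insert a move at lifetime position `position`, reusing an existing parallel
// move at that position when possible. The parity of `position` determines
// whether the move goes before or after the instruction at that position.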
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001336void RegisterAllocator::InsertParallelMoveAt(size_t position,
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001337 HInstruction* instruction,
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001338 Location source,
1339 Location destination) const {
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001340 DCHECK(IsValidDestination(destination)) << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001341 if (source.Equals(destination)) return;
1342
1343 HInstruction* at = liveness_.GetInstructionFromPosition(position / 2);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001344 HParallelMove* move;
Nicolas Geoffray46fbaab2014-11-26 18:30:23 +00001345 if (at == nullptr) {
1346 if (IsInstructionStart(position)) {
 1347      // Block boundary, don't do anything: the connection of split siblings will handle it.
1348 return;
1349 } else {
1350 // Move must happen before the first instruction of the block.
1351 at = liveness_.GetInstructionFromPosition((position + 1) / 2);
Nicolas Geoffray59768572014-12-01 09:50:04 +00001352 // Note that parallel moves may have already been inserted, so we explicitly
1353 // ask for the first instruction of the block: `GetInstructionFromPosition` does
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001354      // not know about the `HParallelMove` instructions.
Nicolas Geoffray59768572014-12-01 09:50:04 +00001355 at = at->GetBlock()->GetFirstInstruction();
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001356
1357 if (at->GetLifetimePosition() < position) {
1358 // We may insert moves for split siblings and phi spills at the beginning of the block.
1359 // Since this is a different lifetime position, we need to go to the next instruction.
1360 DCHECK(at->IsParallelMove());
1361 at = at->GetNext();
1362 }
1363
Nicolas Geoffray59768572014-12-01 09:50:04 +00001364 if (at->GetLifetimePosition() != position) {
1365 DCHECK_GT(at->GetLifetimePosition(), position);
Nicolas Geoffray46fbaab2014-11-26 18:30:23 +00001366 move = new (allocator_) HParallelMove(allocator_);
1367 move->SetLifetimePosition(position);
1368 at->GetBlock()->InsertInstructionBefore(move, at);
Nicolas Geoffray59768572014-12-01 09:50:04 +00001369 } else {
1370 DCHECK(at->IsParallelMove());
1371 move = at->AsParallelMove();
Nicolas Geoffray46fbaab2014-11-26 18:30:23 +00001372 }
1373 }
1374 } else if (IsInstructionEnd(position)) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001375 // Move must happen after the instruction.
1376 DCHECK(!at->IsControlFlow());
1377 move = at->GetNext()->AsParallelMove();
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001378    // This is a parallel move for connecting siblings in the same block. We need to
 1379    // differentiate it from moves for connecting blocks, and from input moves.
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01001380 if (move == nullptr || move->GetLifetimePosition() > position) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001381 move = new (allocator_) HParallelMove(allocator_);
1382 move->SetLifetimePosition(position);
1383 at->GetBlock()->InsertInstructionBefore(move, at->GetNext());
1384 }
1385 } else {
1386 // Move must happen before the instruction.
1387 HInstruction* previous = at->GetPrevious();
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001388 if (previous == nullptr
1389 || !previous->IsParallelMove()
1390 || previous->GetLifetimePosition() != position) {
1391 // If the previous is a parallel move, then its position must be lower
1392 // than the given `position`: it was added just after the non-parallel
1393 // move instruction that precedes `instruction`.
1394 DCHECK(previous == nullptr
1395 || !previous->IsParallelMove()
1396 || previous->GetLifetimePosition() < position);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001397 move = new (allocator_) HParallelMove(allocator_);
1398 move->SetLifetimePosition(position);
1399 at->GetBlock()->InsertInstructionBefore(move, at);
1400 } else {
1401 move = previous->AsParallelMove();
1402 }
1403 }
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001404 DCHECK_EQ(move->GetLifetimePosition(), position);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001405 AddMove(move, source, destination, instruction, instruction->GetType());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001406}
1407
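// Insert a move at the end of `block`, just before its last instruction,
// reusing the parallel move already placed there if its position matches.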
1408void RegisterAllocator::InsertParallelMoveAtExitOf(HBasicBlock* block,
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001409 HInstruction* instruction,
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001410 Location source,
1411 Location destination) const {
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001412 DCHECK(IsValidDestination(destination)) << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001413 if (source.Equals(destination)) return;
1414
1415 DCHECK_EQ(block->GetSuccessors().Size(), 1u);
1416 HInstruction* last = block->GetLastInstruction();
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001417 // We insert moves at exit for phi predecessors and connecting blocks.
1418 // A block ending with an if cannot branch to a block with phis because
 1419  // we do not allow critical edges. Nor can it connect
1420 // a split interval between two blocks: the move has to happen in the successor.
1421 DCHECK(!last->IsIf());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001422 HInstruction* previous = last->GetPrevious();
1423 HParallelMove* move;
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001424 // This is a parallel move for connecting blocks. We need to differentiate
 1425  // it from moves for connecting siblings in the same block, and from output moves.
Nicolas Geoffray59768572014-12-01 09:50:04 +00001426 size_t position = last->GetLifetimePosition();
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001427 if (previous == nullptr || !previous->IsParallelMove()
Nicolas Geoffray59768572014-12-01 09:50:04 +00001428 || previous->AsParallelMove()->GetLifetimePosition() != position) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001429 move = new (allocator_) HParallelMove(allocator_);
Nicolas Geoffray59768572014-12-01 09:50:04 +00001430 move->SetLifetimePosition(position);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001431 block->InsertInstructionBefore(move, last);
1432 } else {
1433 move = previous->AsParallelMove();
1434 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001435 AddMove(move, source, destination, instruction, instruction->GetType());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001436}
1437
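// Insert a move at the beginning of `block`, reusing the parallel move at the
// block's lifetime start if one already exists.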
1438void RegisterAllocator::InsertParallelMoveAtEntryOf(HBasicBlock* block,
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001439 HInstruction* instruction,
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001440 Location source,
1441 Location destination) const {
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001442 DCHECK(IsValidDestination(destination)) << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001443 if (source.Equals(destination)) return;
1444
1445 HInstruction* first = block->GetFirstInstruction();
1446 HParallelMove* move = first->AsParallelMove();
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001447 size_t position = block->GetLifetimeStart();
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001448 // This is a parallel move for connecting blocks. We need to differentiate
 1449  // it from moves for connecting siblings in the same block, and from input moves.
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001450 if (move == nullptr || move->GetLifetimePosition() != position) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001451 move = new (allocator_) HParallelMove(allocator_);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001452 move->SetLifetimePosition(position);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001453 block->InsertInstructionBefore(move, first);
1454 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001455 AddMove(move, source, destination, instruction, instruction->GetType());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001456}
1457
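// Insert a move right after `instruction` (or at the entry of its block if it
// is a phi), used for moving the output of an instruction.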
1458void RegisterAllocator::InsertMoveAfter(HInstruction* instruction,
1459 Location source,
1460 Location destination) const {
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001461 DCHECK(IsValidDestination(destination)) << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001462 if (source.Equals(destination)) return;
1463
Roland Levillain476df552014-10-09 17:51:36 +01001464 if (instruction->IsPhi()) {
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001465 InsertParallelMoveAtEntryOf(instruction->GetBlock(), instruction, source, destination);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001466 return;
1467 }
1468
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001469 size_t position = instruction->GetLifetimePosition() + 1;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001470 HParallelMove* move = instruction->GetNext()->AsParallelMove();
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001471 // This is a parallel move for moving the output of an instruction. We need
 1472  // to differentiate it from input moves, from moves for connecting siblings in the
 1473  // same block, and from moves for connecting blocks.
1474 if (move == nullptr || move->GetLifetimePosition() != position) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001475 move = new (allocator_) HParallelMove(allocator_);
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001476 move->SetLifetimePosition(position);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001477 instruction->GetBlock()->InsertInstructionBefore(move, instruction->GetNext());
1478 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001479 AddMove(move, source, destination, instruction, instruction->GetType());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001480}
1481
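// Walk all siblings of `interval`: update the locations recorded for uses and
// environment uses, insert moves between adjacent siblings that ended up in
// different locations, and record live registers and stack slots at safepoints.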
1482void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
1483 LiveInterval* current = interval;
1484 if (current->HasSpillSlot() && current->HasRegister()) {
1485 // We spill eagerly, so move must be at definition.
1486 InsertMoveAfter(interval->GetDefinedBy(),
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001487 interval->ToLocation(),
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001488 interval->NeedsTwoSpillSlots()
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001489 ? Location::DoubleStackSlot(interval->GetParent()->GetSpillSlot())
1490 : Location::StackSlot(interval->GetParent()->GetSpillSlot()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001491 }
1492 UsePosition* use = current->GetFirstUse();
Nicolas Geoffray4ed947a2015-04-27 16:58:06 +01001493 UsePosition* env_use = current->GetFirstEnvironmentUse();
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001494
1495 // Walk over all siblings, updating locations of use positions, and
1496 // connecting them when they are adjacent.
1497 do {
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001498 Location source = current->ToLocation();
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001499
1500 // Walk over all uses covered by this interval, and update the location
1501 // information.
Nicolas Geoffrayd8126be2015-03-27 10:22:41 +00001502
1503 LiveRange* range = current->GetFirstRange();
1504 while (range != nullptr) {
Nicolas Geoffray57902602015-04-21 14:28:41 +01001505 while (use != nullptr && use->GetPosition() < range->GetStart()) {
1506 DCHECK(use->IsSynthesized());
1507 use = use->GetNext();
1508 }
Nicolas Geoffrayd8126be2015-03-27 10:22:41 +00001509 while (use != nullptr && use->GetPosition() <= range->GetEnd()) {
Nicolas Geoffray4ed947a2015-04-27 16:58:06 +01001510 DCHECK(!use->GetIsEnvironment());
David Brazdil3fc992f2015-04-16 18:31:55 +01001511 DCHECK(current->CoversSlow(use->GetPosition()) || (use->GetPosition() == range->GetEnd()));
Nicolas Geoffray57902602015-04-21 14:28:41 +01001512 if (!use->IsSynthesized()) {
1513 LocationSummary* locations = use->GetUser()->GetLocations();
1514 Location expected_location = locations->InAt(use->GetInputIndex());
1515 // The expected (actual) location may be invalid in case the input is unused. Currently
1516 // this only happens for intrinsics.
1517 if (expected_location.IsValid()) {
1518 if (expected_location.IsUnallocated()) {
1519 locations->SetInAt(use->GetInputIndex(), source);
1520 } else if (!expected_location.IsConstant()) {
1521 AddInputMoveFor(interval->GetDefinedBy(), use->GetUser(), source, expected_location);
1522 }
1523 } else {
1524 DCHECK(use->GetUser()->IsInvoke());
1525 DCHECK(use->GetUser()->AsInvoke()->GetIntrinsic() != Intrinsics::kNone);
Nicolas Geoffrayd8126be2015-03-27 10:22:41 +00001526 }
1527 }
1528 use = use->GetNext();
1529 }
Nicolas Geoffray4ed947a2015-04-27 16:58:06 +01001530
1531 // Walk over the environment uses, and update their locations.
1532 while (env_use != nullptr && env_use->GetPosition() < range->GetStart()) {
1533 env_use = env_use->GetNext();
1534 }
1535
1536 while (env_use != nullptr && env_use->GetPosition() <= range->GetEnd()) {
Nicolas Geoffray0a23d742015-05-07 11:57:35 +01001537 DCHECK(current->CoversSlow(env_use->GetPosition())
1538 || (env_use->GetPosition() == range->GetEnd()));
1539 HEnvironment* environment = env_use->GetUser()->GetEnvironment();
1540 environment->SetLocationAt(env_use->GetInputIndex(), source);
Nicolas Geoffray4ed947a2015-04-27 16:58:06 +01001541 env_use = env_use->GetNext();
1542 }
1543
Nicolas Geoffrayd8126be2015-03-27 10:22:41 +00001544 range = range->GetNext();
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001545 }
1546
1547 // If the next interval starts just after this one, and has a register,
1548 // insert a move.
1549 LiveInterval* next_sibling = current->GetNextSibling();
1550 if (next_sibling != nullptr
1551 && next_sibling->HasRegister()
1552 && current->GetEnd() == next_sibling->GetStart()) {
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001553 Location destination = next_sibling->ToLocation();
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001554 InsertParallelMoveAt(current->GetEnd(), interval->GetDefinedBy(), source, destination);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001555 }
Nicolas Geoffray39468442014-09-02 15:17:15 +01001556
Nicolas Geoffray43af7282015-04-16 13:01:01 +01001557 for (SafepointPosition* safepoint_position = current->GetFirstSafepoint();
1558 safepoint_position != nullptr;
1559 safepoint_position = safepoint_position->GetNext()) {
David Brazdil3fc992f2015-04-16 18:31:55 +01001560 DCHECK(current->CoversSlow(safepoint_position->GetPosition()));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001561
Nicolas Geoffray5588e582015-04-14 14:10:59 +01001562 LocationSummary* locations = safepoint_position->GetLocations();
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001563 if ((current->GetType() == Primitive::kPrimNot) && current->GetParent()->HasSpillSlot()) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001564 locations->SetStackBit(current->GetParent()->GetSpillSlot() / kVRegSize);
1565 }
1566
1567 switch (source.GetKind()) {
1568 case Location::kRegister: {
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001569 locations->AddLiveRegister(source);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001570 if (kIsDebugBuild && locations->OnlyCallsOnSlowPath()) {
1571 DCHECK_LE(locations->GetNumberOfLiveRegisters(),
1572 maximum_number_of_live_core_registers_ +
1573 maximum_number_of_live_fp_registers_);
1574 }
Nicolas Geoffray39468442014-09-02 15:17:15 +01001575 if (current->GetType() == Primitive::kPrimNot) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001576 locations->SetRegisterBit(source.reg());
Nicolas Geoffray39468442014-09-02 15:17:15 +01001577 }
1578 break;
1579 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001580 case Location::kFpuRegister: {
1581 locations->AddLiveRegister(source);
1582 break;
1583 }
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00001584
1585 case Location::kRegisterPair:
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001586 case Location::kFpuRegisterPair: {
1587 locations->AddLiveRegister(source.ToLow());
1588 locations->AddLiveRegister(source.ToHigh());
1589 break;
1590 }
Nicolas Geoffray39468442014-09-02 15:17:15 +01001591 case Location::kStackSlot: // Fall-through
1592 case Location::kDoubleStackSlot: // Fall-through
1593 case Location::kConstant: {
1594 // Nothing to do.
1595 break;
1596 }
1597 default: {
1598 LOG(FATAL) << "Unexpected location for object";
1599 }
1600 }
1601 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001602 current = next_sibling;
1603 } while (current != nullptr);
Nicolas Geoffrayd8126be2015-03-27 10:22:41 +00001604
Nicolas Geoffray57902602015-04-21 14:28:41 +01001605 if (kIsDebugBuild) {
1606 // Following uses can only be synthesized uses.
1607 while (use != nullptr) {
1608 DCHECK(use->IsSynthesized());
1609 use = use->GetNext();
1610 }
1611 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001612}
1613
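// Insert, if needed, a move on the control-flow edge from `from` to `to`,
// connecting the sibling of `interval` live at the end of `from` with the
// sibling live at the start of `to`.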
1614void RegisterAllocator::ConnectSplitSiblings(LiveInterval* interval,
1615 HBasicBlock* from,
1616 HBasicBlock* to) const {
1617 if (interval->GetNextSibling() == nullptr) {
1618 // Nothing to connect. The whole range was allocated to the same location.
1619 return;
1620 }
1621
David Brazdil241a4862015-04-16 17:59:03 +01001622 // Find the intervals that cover `from` and `to`.
1623 LiveInterval* destination = interval->GetSiblingAt(to->GetLifetimeStart());
1624 LiveInterval* source = interval->GetSiblingAt(from->GetLifetimeEnd() - 1);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001625
1626 if (destination == source) {
1627 // Interval was not split.
1628 return;
1629 }
Nicolas Geoffray8ddb00c2014-09-29 12:00:40 +01001630 DCHECK(destination != nullptr && source != nullptr);
1631
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001632 if (!destination->HasRegister()) {
1633 // Values are eagerly spilled. Spill slot already contains appropriate value.
1634 return;
1635 }
1636
 1637  // If `from` has only one successor, we can put the moves at its exit. Otherwise
1638 // we need to put the moves at the entry of `to`.
1639 if (from->GetSuccessors().Size() == 1) {
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001640 InsertParallelMoveAtExitOf(from,
1641 interval->GetParent()->GetDefinedBy(),
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001642 source->ToLocation(),
1643 destination->ToLocation());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001644 } else {
1645 DCHECK_EQ(to->GetPredecessors().Size(), 1u);
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001646 InsertParallelMoveAtEntryOf(to,
1647 interval->GetParent()->GetDefinedBy(),
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001648 source->ToLocation(),
1649 destination->ToLocation());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001650 }
1651}
1652
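// Resolution phase: now that registers and spill slots are assigned, adjust
// output locations and stack slots, connect siblings within and across blocks,
// resolve phi inputs, and assign temporary locations.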
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001653void RegisterAllocator::Resolve() {
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001654 codegen_->InitializeCodeGeneration(GetNumberOfSpillSlots(),
Nicolas Geoffray4c204ba2015-02-03 15:12:35 +00001655 maximum_number_of_live_core_registers_,
1656 maximum_number_of_live_fp_registers_,
1657 reserved_out_slots_,
Nicolas Geoffray0d9f17d2015-04-15 14:17:44 +01001658 codegen_->GetGraph()->GetLinearOrder());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001659
1660 // Adjust the Out Location of instructions.
1661 // TODO: Use pointers of Location inside LiveInterval to avoid doing another iteration.
1662 for (size_t i = 0, e = liveness_.GetNumberOfSsaValues(); i < e; ++i) {
1663 HInstruction* instruction = liveness_.GetInstructionFromSsaIndex(i);
1664 LiveInterval* current = instruction->GetLiveInterval();
1665 LocationSummary* locations = instruction->GetLocations();
1666 Location location = locations->Out();
Roland Levillain476df552014-10-09 17:51:36 +01001667 if (instruction->IsParameterValue()) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001668 // Now that we know the frame size, adjust the parameter's location.
1669 if (location.IsStackSlot()) {
1670 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1671 current->SetSpillSlot(location.GetStackIndex());
Nicolas Geoffrayf43083d2014-11-07 10:48:10 +00001672 locations->UpdateOut(location);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001673 } else if (location.IsDoubleStackSlot()) {
1674 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1675 current->SetSpillSlot(location.GetStackIndex());
Nicolas Geoffrayf43083d2014-11-07 10:48:10 +00001676 locations->UpdateOut(location);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001677 } else if (current->HasSpillSlot()) {
1678 current->SetSpillSlot(current->GetSpillSlot() + codegen_->GetFrameSize());
1679 }
Nicolas Geoffray776b3182015-02-23 14:14:57 +00001680 } else if (current->HasSpillSlot()) {
1681 // Adjust the stack slot, now that we know the number of them for each type.
1682 // The way this implementation lays out the stack is the following:
1683 // [parameter slots ]
1684 // [double spill slots ]
1685 // [long spill slots ]
1686 // [float spill slots ]
1687 // [int/ref values ]
1688 // [maximum out values ] (number of arguments for calls)
1689 // [art method ].
1690 uint32_t slot = current->GetSpillSlot();
1691 switch (current->GetType()) {
1692 case Primitive::kPrimDouble:
1693 slot += long_spill_slots_.Size();
1694 FALLTHROUGH_INTENDED;
1695 case Primitive::kPrimLong:
1696 slot += float_spill_slots_.Size();
1697 FALLTHROUGH_INTENDED;
1698 case Primitive::kPrimFloat:
1699 slot += int_spill_slots_.Size();
1700 FALLTHROUGH_INTENDED;
1701 case Primitive::kPrimNot:
1702 case Primitive::kPrimInt:
1703 case Primitive::kPrimChar:
1704 case Primitive::kPrimByte:
1705 case Primitive::kPrimBoolean:
1706 case Primitive::kPrimShort:
1707 slot += reserved_out_slots_;
1708 break;
1709 case Primitive::kPrimVoid:
1710 LOG(FATAL) << "Unexpected type for interval " << current->GetType();
1711 }
1712 current->SetSpillSlot(slot * kVRegSize);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001713 }
1714
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001715 Location source = current->ToLocation();
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001716
1717 if (location.IsUnallocated()) {
1718 if (location.GetPolicy() == Location::kSameAsFirstInput) {
Calin Juravled0d48522014-11-04 16:40:20 +00001719 if (locations->InAt(0).IsUnallocated()) {
1720 locations->SetInAt(0, source);
1721 } else {
1722 DCHECK(locations->InAt(0).Equals(source));
1723 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001724 }
Nicolas Geoffray829280c2015-01-28 10:20:37 +00001725 locations->UpdateOut(source);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001726 } else {
1727 DCHECK(source.Equals(location));
1728 }
1729 }
1730
1731 // Connect siblings.
1732 for (size_t i = 0, e = liveness_.GetNumberOfSsaValues(); i < e; ++i) {
1733 HInstruction* instruction = liveness_.GetInstructionFromSsaIndex(i);
1734 ConnectSiblings(instruction->GetLiveInterval());
1735 }
1736
1737 // Resolve non-linear control flow across branches. Order does not matter.
Nicolas Geoffray0d9f17d2015-04-15 14:17:44 +01001738 for (HLinearOrderIterator it(*codegen_->GetGraph()); !it.Done(); it.Advance()) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001739 HBasicBlock* block = it.Current();
1740 BitVector* live = liveness_.GetLiveInSet(*block);
1741 for (uint32_t idx : live->Indexes()) {
1742 HInstruction* current = liveness_.GetInstructionFromSsaIndex(idx);
1743 LiveInterval* interval = current->GetLiveInterval();
1744 for (size_t i = 0, e = block->GetPredecessors().Size(); i < e; ++i) {
1745 ConnectSplitSiblings(interval, block->GetPredecessors().Get(i), block);
1746 }
1747 }
1748 }
1749
1750 // Resolve phi inputs. Order does not matter.
Nicolas Geoffray0d9f17d2015-04-15 14:17:44 +01001751 for (HLinearOrderIterator it(*codegen_->GetGraph()); !it.Done(); it.Advance()) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001752 HBasicBlock* current = it.Current();
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001753 for (HInstructionIterator inst_it(current->GetPhis()); !inst_it.Done(); inst_it.Advance()) {
1754 HInstruction* phi = inst_it.Current();
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001755 for (size_t i = 0, e = current->GetPredecessors().Size(); i < e; ++i) {
1756 HBasicBlock* predecessor = current->GetPredecessors().Get(i);
1757 DCHECK_EQ(predecessor->GetSuccessors().Size(), 1u);
1758 HInstruction* input = phi->InputAt(i);
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001759 Location source = input->GetLiveInterval()->GetLocationAt(
1760 predecessor->GetLifetimeEnd() - 1);
1761 Location destination = phi->GetLiveInterval()->ToLocation();
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001762 InsertParallelMoveAtExitOf(predecessor, phi, source, destination);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001763 }
1764 }
1765 }
Nicolas Geoffray39468442014-09-02 15:17:15 +01001766
1767 // Assign temp locations.
Nicolas Geoffray39468442014-09-02 15:17:15 +01001768 for (size_t i = 0; i < temp_intervals_.Size(); ++i) {
1769 LiveInterval* temp = temp_intervals_.Get(i);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001770 if (temp->IsHighInterval()) {
1771 // High intervals can be skipped, they are already handled by the low interval.
1772 continue;
1773 }
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001774 HInstruction* at = liveness_.GetTempUser(temp);
Nicolas Geoffrayf01d3442015-03-27 17:15:49 +00001775 size_t temp_index = liveness_.GetTempIndex(temp);
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001776 LocationSummary* locations = at->GetLocations();
Roland Levillain5368c212014-11-27 15:03:41 +00001777 switch (temp->GetType()) {
1778 case Primitive::kPrimInt:
Nicolas Geoffrayf01d3442015-03-27 17:15:49 +00001779 locations->SetTempAt(temp_index, Location::RegisterLocation(temp->GetRegister()));
Roland Levillain5368c212014-11-27 15:03:41 +00001780 break;
1781
1782 case Primitive::kPrimDouble:
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001783 if (codegen_->NeedsTwoRegisters(Primitive::kPrimDouble)) {
1784 Location location = Location::FpuRegisterPairLocation(
1785 temp->GetRegister(), temp->GetHighInterval()->GetRegister());
Nicolas Geoffrayf01d3442015-03-27 17:15:49 +00001786 locations->SetTempAt(temp_index, location);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001787 } else {
Nicolas Geoffrayf01d3442015-03-27 17:15:49 +00001788 locations->SetTempAt(temp_index, Location::FpuRegisterLocation(temp->GetRegister()));
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001789 }
Roland Levillain5368c212014-11-27 15:03:41 +00001790 break;
1791
1792 default:
1793 LOG(FATAL) << "Unexpected type for temporary location "
1794 << temp->GetType();
1795 }
Nicolas Geoffray39468442014-09-02 15:17:15 +01001796 }
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01001797}
1798
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001799} // namespace art