blob: 6cd20249c11b84ebf389075a5369eb47f5fd5045 [file] [log] [blame]
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "register_allocator.h"
18
Ian Rogersc7dd2952014-10-21 23:31:19 -070019#include <sstream>
20
Ian Rogerse77493c2014-08-20 15:08:45 -070021#include "base/bit_vector-inl.h"
Nicolas Geoffraya7062e02014-05-22 12:50:17 +010022#include "code_generator.h"
23#include "ssa_liveness_analysis.h"
24
25namespace art {
26
// Sentinel "infinitely far away" lifetime position: assigning -1 to an
// unsigned size_t wraps to the maximum representable value, so no real
// lifetime position can ever exceed it.
static constexpr size_t kMaxLifetimePosition = -1;
// Initial capacity of the spill slot array; it grows on demand.
static constexpr size_t kDefaultNumberOfSpillSlots = 4;
Nicolas Geoffraya7062e02014-05-22 12:50:17 +010029
// Register pairs are modeled as the consecutive pair (reg, reg + 1).
// ARM requires this exact layout for double registers, since we allocate
// individual SRegisters.
static int GetHighForLowRegister(int reg) {
  return reg + 1;
}

// A register can be the low half of a pair only when it is even-numbered.
static bool IsLowRegister(int reg) {
  return (reg % 2) == 0;
}
35
// Sets up the allocator state: empty work lists (unhandled/handled/active/
// inactive), per-physical-register fixed-interval tables sized to the target,
// and bookkeeping counters. Registers that are permanently unavailable are
// taken from the code generator's blocked-register tables.
RegisterAllocator::RegisterAllocator(ArenaAllocator* allocator,
                                     CodeGenerator* codegen,
                                     const SsaLivenessAnalysis& liveness)
      : allocator_(allocator),
        codegen_(codegen),
        liveness_(liveness),
        unhandled_core_intervals_(allocator, 0),
        unhandled_fp_intervals_(allocator, 0),
        unhandled_(nullptr),
        handled_(allocator, 0),
        active_(allocator, 0),
        inactive_(allocator, 0),
        physical_core_register_intervals_(allocator, codegen->GetNumberOfCoreRegisters()),
        physical_fp_register_intervals_(allocator, codegen->GetNumberOfFloatingPointRegisters()),
        temp_intervals_(allocator, 4),
        spill_slots_(allocator, kDefaultNumberOfSpillSlots),
        safepoints_(allocator, 0),
        processing_core_registers_(false),
        number_of_registers_(-1),
        registers_array_(nullptr),
        blocked_core_registers_(codegen->GetBlockedCoreRegisters()),
        blocked_fp_registers_(codegen->GetBlockedFloatingPointRegisters()),
        reserved_out_slots_(0),
        maximum_number_of_live_core_registers_(0),
        maximum_number_of_live_fp_registers_(0) {
  // Let the code generator mark which physical registers are off-limits
  // before any interval is created.
  codegen->SetupBlockedRegisters();
  // Pre-size the fixed-interval tables so they can be indexed by register
  // number; entries stay null until BlockRegister() lazily creates them.
  physical_core_register_intervals_.SetSize(codegen->GetNumberOfCoreRegisters());
  physical_fp_register_intervals_.SetSize(codegen->GetNumberOfFloatingPointRegisters());
  // Always reserve for the current method and the graph's max out registers.
  // TODO: compute it instead.
  reserved_out_slots_ = 1 + codegen->GetGraph()->GetMaximumNumberOfOutVRegs();
}
68
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +010069bool RegisterAllocator::CanAllocateRegistersFor(const HGraph& graph,
70 InstructionSet instruction_set) {
71 if (!Supports(instruction_set)) {
72 return false;
73 }
Nicolas Geoffray41aedbb2015-01-14 10:49:16 +000074 if (instruction_set == kArm64
75 || instruction_set == kX86_64
76 || instruction_set == kArm
77 || instruction_set == kThumb2) {
Alexandre Rames3e69f162014-12-10 10:36:50 +000078 return true;
79 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +010080 for (size_t i = 0, e = graph.GetBlocks().Size(); i < e; ++i) {
81 for (HInstructionIterator it(graph.GetBlocks().Get(i)->GetInstructions());
82 !it.Done();
83 it.Advance()) {
84 HInstruction* current = it.Current();
Nicolas Geoffray840e5462015-01-07 16:01:24 +000085 if (instruction_set == kX86) {
86 if (current->GetType() == Primitive::kPrimLong ||
87 current->GetType() == Primitive::kPrimFloat ||
88 current->GetType() == Primitive::kPrimDouble) {
89 return false;
90 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +010091 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +010092 }
93 }
94 return true;
95}
96
97static bool ShouldProcess(bool processing_core_registers, LiveInterval* interval) {
Nicolas Geoffray39468442014-09-02 15:17:15 +010098 if (interval == nullptr) return false;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +010099 bool is_core_register = (interval->GetType() != Primitive::kPrimDouble)
100 && (interval->GetType() != Primitive::kPrimFloat);
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100101 return processing_core_registers == is_core_register;
102}
103
// Entry point: run the allocation passes, then connect siblings and resolve
// locations. In debug builds, additionally validate both register files and
// re-check that lifetime positions are still monotonically increasing in
// linear order after the moves inserted by allocation.
void RegisterAllocator::AllocateRegisters() {
  AllocateRegistersInternal();
  Resolve();

  if (kIsDebugBuild) {
    // Validate core and FP allocations independently; `true` makes a
    // detected conflict fatal.
    processing_core_registers_ = true;
    ValidateInternal(true);
    processing_core_registers_ = false;
    ValidateInternal(true);
    // Check that the linear order is still correct with regards to lifetime positions.
    // Since only parallel moves have been inserted during the register allocation,
    // these checks are mostly for making sure these moves have been added correctly.
    size_t current_liveness = 0;
    for (HLinearOrderIterator it(liveness_); !it.Done(); it.Advance()) {
      HBasicBlock* block = it.Current();
      // Phis come first in a block, so they are checked before the
      // block's instructions.
      for (HInstructionIterator inst_it(block->GetPhis()); !inst_it.Done(); inst_it.Advance()) {
        HInstruction* instruction = inst_it.Current();
        DCHECK_LE(current_liveness, instruction->GetLifetimePosition());
        current_liveness = instruction->GetLifetimePosition();
      }
      for (HInstructionIterator inst_it(block->GetInstructions());
           !inst_it.Done();
           inst_it.Advance()) {
        HInstruction* instruction = inst_it.Current();
        DCHECK_LE(current_liveness, instruction->GetLifetimePosition()) << instruction->DebugName();
        current_liveness = instruction->GetLifetimePosition();
      }
    }
  }
}
134
135void RegisterAllocator::BlockRegister(Location location,
136 size_t start,
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100137 size_t end) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +0100138 int reg = location.reg();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100139 DCHECK(location.IsRegister() || location.IsFpuRegister());
140 LiveInterval* interval = location.IsRegister()
141 ? physical_core_register_intervals_.Get(reg)
142 : physical_fp_register_intervals_.Get(reg);
143 Primitive::Type type = location.IsRegister()
144 ? Primitive::kPrimInt
Nicolas Geoffray840e5462015-01-07 16:01:24 +0000145 : Primitive::kPrimFloat;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +0100146 if (interval == nullptr) {
147 interval = LiveInterval::MakeFixedInterval(allocator_, reg, type);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100148 if (location.IsRegister()) {
149 physical_core_register_intervals_.Put(reg, interval);
150 } else {
151 physical_fp_register_intervals_.Put(reg, interval);
152 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +0100153 }
154 DCHECK(interval->GetRegister() == reg);
155 interval->AddRange(start, end);
156}
157
// Builds all live intervals by walking the graph backwards, then runs the
// linear scan twice: once for core registers and once for floating-point
// registers, resetting the shared work lists in between.
void RegisterAllocator::AllocateRegistersInternal() {
  // Iterate post-order, to ensure the list is sorted, and the last added interval
  // is the one with the lowest start position.
  for (HLinearPostOrderIterator it(liveness_); !it.Done(); it.Advance()) {
    HBasicBlock* block = it.Current();
    // Instructions are processed before phis because we walk backwards:
    // phis conceptually precede the block's instructions.
    for (HBackwardInstructionIterator back_it(block->GetInstructions()); !back_it.Done();
         back_it.Advance()) {
      ProcessInstruction(back_it.Current());
    }
    for (HInstructionIterator inst_it(block->GetPhis()); !inst_it.Done(); inst_it.Advance()) {
      ProcessInstruction(inst_it.Current());
    }
  }

  // --- Pass 1: core registers. ---
  number_of_registers_ = codegen_->GetNumberOfCoreRegisters();
  registers_array_ = allocator_->AllocArray<size_t>(number_of_registers_);
  processing_core_registers_ = true;
  unhandled_ = &unhandled_core_intervals_;
  for (size_t i = 0, e = physical_core_register_intervals_.Size(); i < e; ++i) {
    LiveInterval* fixed = physical_core_register_intervals_.Get(i);
    if (fixed != nullptr) {
      // Fixed interval is added to inactive_ instead of unhandled_.
      // It's also the only type of inactive interval whose start position
      // can be after the current interval during linear scan.
      // Fixed interval is never split and never moves to unhandled_.
      inactive_.Add(fixed);
    }
  }
  LinearScan();

  // Clear the work lists before the FP pass; the unhandled lists are
  // per-register-file and are swapped instead.
  inactive_.Reset();
  active_.Reset();
  handled_.Reset();

  // --- Pass 2: floating-point registers. ---
  number_of_registers_ = codegen_->GetNumberOfFloatingPointRegisters();
  registers_array_ = allocator_->AllocArray<size_t>(number_of_registers_);
  processing_core_registers_ = false;
  unhandled_ = &unhandled_fp_intervals_;
  for (size_t i = 0, e = physical_fp_register_intervals_.Size(); i < e; ++i) {
    LiveInterval* fixed = physical_fp_register_intervals_.Get(i);
    if (fixed != nullptr) {
      // Fixed interval is added to inactive_ instead of unhandled_.
      // It's also the only type of inactive interval whose start position
      // can be after the current interval during linear scan.
      // Fixed interval is never split and never moves to unhandled_.
      inactive_.Add(fixed);
    }
  }
  LinearScan();
}
208
// Translates one instruction's LocationSummary into allocator state:
//  1. synthesize temp intervals (or block fixed temp registers),
//  2. record safepoints; for calls, block every physical register,
//  3. block registers pinned by fixed inputs,
//  4. apply the fixed-output constraint to the instruction's own interval,
//  5. queue the interval on the matching unhandled list.
// Called while walking blocks backwards, so ranges are created in order of
// decreasing position.
void RegisterAllocator::ProcessInstruction(HInstruction* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  size_t position = instruction->GetLifetimePosition();

  // No locations means nothing to allocate for this instruction.
  if (locations == nullptr) return;

  // Create synthesized intervals for temporaries.
  for (size_t i = 0; i < locations->GetTempCount(); ++i) {
    Location temp = locations->GetTemp(i);
    if (temp.IsRegister() || temp.IsFpuRegister()) {
      // Fixed temp: just make that register unavailable at this position.
      BlockRegister(temp, position, position + 1);
    } else {
      DCHECK(temp.IsUnallocated());
      switch (temp.GetPolicy()) {
        case Location::kRequiresRegister: {
          // One-position core interval competing like any other interval.
          LiveInterval* interval =
              LiveInterval::MakeTempInterval(allocator_, Primitive::kPrimInt);
          temp_intervals_.Add(interval);
          interval->AddRange(position, position + 1);
          unhandled_core_intervals_.Add(interval);
          break;
        }

        case Location::kRequiresFpuRegister: {
          LiveInterval* interval =
              LiveInterval::MakeTempInterval(allocator_, Primitive::kPrimDouble);
          temp_intervals_.Add(interval);
          interval->AddRange(position, position + 1);
          // Targets that represent a double as two registers also need a
          // synthesized high interval for the second register.
          if (codegen_->NeedsTwoRegisters(Primitive::kPrimDouble)) {
            interval->AddHighInterval(true);
            LiveInterval* high = interval->GetHighInterval();
            temp_intervals_.Add(high);
            unhandled_fp_intervals_.Add(high);
          }
          unhandled_fp_intervals_.Add(interval);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected policy for temporary location "
                     << temp.GetPolicy();
      }
    }
  }

  // Which unhandled list this instruction's own interval belongs to.
  bool core_register = (instruction->GetType() != Primitive::kPrimDouble)
      && (instruction->GetType() != Primitive::kPrimFloat);

  if (locations->CanCall()) {
    // Suspend checks do not make the method a non-leaf.
    if (!instruction->IsSuspendCheck()) {
      codegen_->MarkNotLeaf();
    }
    safepoints_.Add(instruction);
    if (locations->OnlyCallsOnSlowPath()) {
      // We add a synthesized range at this position to record the live registers
      // at this position. Ideally, we could just update the safepoints when locations
      // are updated, but we currently need to know the full stack size before updating
      // locations (because of parameters and the fact that we don't have a frame pointer).
      // And knowing the full stack size requires to know the maximum number of live
      // registers at calls in slow paths.
      // By adding the following interval in the algorithm, we can compute this
      // maximum before updating locations.
      LiveInterval* interval = LiveInterval::MakeSlowPathInterval(allocator_, instruction);
      interval->AddRange(position, position + 1);
      AddSorted(&unhandled_core_intervals_, interval);
      AddSorted(&unhandled_fp_intervals_, interval);
    }
  }

  if (locations->WillCall()) {
    // Block all registers.
    for (size_t i = 0; i < codegen_->GetNumberOfCoreRegisters(); ++i) {
      BlockRegister(Location::RegisterLocation(i),
                    position,
                    position + 1);
    }
    for (size_t i = 0; i < codegen_->GetNumberOfFloatingPointRegisters(); ++i) {
      BlockRegister(Location::FpuRegisterLocation(i),
                    position,
                    position + 1);
    }
  }

  // Inputs pinned to a specific register (or register pair) block it here.
  for (size_t i = 0; i < instruction->InputCount(); ++i) {
    Location input = locations->InAt(i);
    if (input.IsRegister() || input.IsFpuRegister()) {
      BlockRegister(input, position, position + 1);
    } else if (input.IsPair()) {
      BlockRegister(input.ToLow(), position, position + 1);
      BlockRegister(input.ToHigh(), position, position + 1);
    }
  }

  LiveInterval* current = instruction->GetLiveInterval();
  if (current == nullptr) return;

  GrowableArray<LiveInterval*>& unhandled = core_register
      ? unhandled_core_intervals_
      : unhandled_fp_intervals_;

  // Backwards walk guarantees we only ever prepend earlier-starting intervals.
  DCHECK(unhandled.IsEmpty() || current->StartsBeforeOrAt(unhandled.Peek()));

  if (codegen_->NeedsTwoRegisters(current->GetType())) {
    current->AddHighInterval();
  }

  // Some instructions define their output in fixed register/stack slot. We need
  // to ensure we know these locations before doing register allocation. For a
  // given register, we create an interval that covers these locations. The register
  // will be unavailable at these locations when trying to allocate one for an
  // interval.
  //
  // The backwards walking ensures the ranges are ordered on increasing start positions.
  Location output = locations->Out();
  if (output.IsUnallocated() && output.GetPolicy() == Location::kSameAsFirstInput) {
    // Output reuses input 0's location; inherit its (pair) register if fixed.
    Location first = locations->InAt(0);
    if (first.IsRegister() || first.IsFpuRegister()) {
      current->SetFrom(position + 1);
      current->SetRegister(first.reg());
    } else if (first.IsPair()) {
      current->SetFrom(position + 1);
      current->SetRegister(first.low());
      LiveInterval* high = current->GetHighInterval();
      high->SetRegister(first.high());
      high->SetFrom(position + 1);
    }
  } else if (output.IsRegister() || output.IsFpuRegister()) {
    // Shift the interval's start by one to account for the blocked register.
    current->SetFrom(position + 1);
    current->SetRegister(output.reg());
    BlockRegister(output, position, position + 1);
  } else if (output.IsPair()) {
    current->SetFrom(position + 1);
    current->SetRegister(output.low());
    LiveInterval* high = current->GetHighInterval();
    high->SetRegister(output.high());
    high->SetFrom(position + 1);
    BlockRegister(output.ToLow(), position, position + 1);
    BlockRegister(output.ToHigh(), position, position + 1);
  } else if (output.IsStackSlot() || output.IsDoubleStackSlot()) {
    current->SetSpillSlot(output.GetStackIndex());
  } else {
    DCHECK(output.IsUnallocated() || output.IsConstant());
  }

  // If needed, add interval to the list of unhandled intervals.
  if (current->HasSpillSlot() || instruction->IsConstant()) {
    // Split just before first register use.
    size_t first_register_use = current->FirstRegisterUse();
    if (first_register_use != kNoLifetime) {
      LiveInterval* split = Split(current, first_register_use - 1);
      // Don't add directly to `unhandled`, it needs to be sorted and the start
      // of this new interval might be after intervals already in the list.
      AddSorted(&unhandled, split);
    } else {
      // Nothing to do, we won't allocate a register for this value.
    }
  } else {
    // Don't add directly to `unhandled`, temp or safepoint intervals
    // for this instruction may have been added, and those can be
    // processed first.
    AddSorted(&unhandled, current);
  }
}
373
Nicolas Geoffray31d76b42014-06-09 15:02:22 +0100374class AllRangesIterator : public ValueObject {
375 public:
376 explicit AllRangesIterator(LiveInterval* interval)
377 : current_interval_(interval),
378 current_range_(interval->GetFirstRange()) {}
379
380 bool Done() const { return current_interval_ == nullptr; }
381 LiveRange* CurrentRange() const { return current_range_; }
382 LiveInterval* CurrentInterval() const { return current_interval_; }
383
384 void Advance() {
385 current_range_ = current_range_->GetNext();
386 if (current_range_ == nullptr) {
387 current_interval_ = current_interval_->GetNextSibling();
388 if (current_interval_ != nullptr) {
389 current_range_ = current_interval_->GetFirstRange();
390 }
391 }
392 }
393
394 private:
395 LiveInterval* current_interval_;
396 LiveRange* current_range_;
397
398 DISALLOW_COPY_AND_ASSIGN(AllRangesIterator);
399};
400
// Gathers every interval relevant to the current pass (SSA values, fixed
// physical-register intervals, temps) and delegates conflict checking to
// the static ValidateIntervals() helper.
bool RegisterAllocator::ValidateInternal(bool log_fatal_on_failure) const {
  // To simplify unit testing, we eagerly create the array of intervals, and
  // call the helper method.
  GrowableArray<LiveInterval*> intervals(allocator_, 0);
  for (size_t i = 0; i < liveness_.GetNumberOfSsaValues(); ++i) {
    HInstruction* instruction = liveness_.GetInstructionFromSsaIndex(i);
    // Only intervals belonging to the current (core or FP) pass are checked.
    if (ShouldProcess(processing_core_registers_, instruction->GetLiveInterval())) {
      intervals.Add(instruction->GetLiveInterval());
    }
  }

  // Include the fixed intervals of the register file being validated.
  if (processing_core_registers_) {
    for (size_t i = 0, e = physical_core_register_intervals_.Size(); i < e; ++i) {
      LiveInterval* fixed = physical_core_register_intervals_.Get(i);
      if (fixed != nullptr) {
        intervals.Add(fixed);
      }
    }
  } else {
    for (size_t i = 0, e = physical_fp_register_intervals_.Size(); i < e; ++i) {
      LiveInterval* fixed = physical_fp_register_intervals_.Get(i);
      if (fixed != nullptr) {
        intervals.Add(fixed);
      }
    }
  }

  // Synthesized temp intervals take part in allocation too.
  for (size_t i = 0, e = temp_intervals_.Size(); i < e; ++i) {
    LiveInterval* temp = temp_intervals_.Get(i);
    if (ShouldProcess(processing_core_registers_, temp)) {
      intervals.Add(temp);
    }
  }

  return ValidateIntervals(intervals, spill_slots_.Size(), reserved_out_slots_, *codegen_,
                           allocator_, processing_core_registers_, log_fatal_on_failure);
}
438
// Static verifier: returns true iff no two intervals occupy the same
// register (or the same spill slot) at the same lifetime position. One bit
// vector per register/spill slot tracks the positions already claimed; a
// second claim of a set bit is a conflict. When `log_fatal_on_failure` is
// set, a conflict aborts with a diagnostic instead of returning false.
bool RegisterAllocator::ValidateIntervals(const GrowableArray<LiveInterval*>& intervals,
                                          size_t number_of_spill_slots,
                                          size_t number_of_out_slots,
                                          const CodeGenerator& codegen,
                                          ArenaAllocator* allocator,
                                          bool processing_core_registers,
                                          bool log_fatal_on_failure) {
  size_t number_of_registers = processing_core_registers
      ? codegen.GetNumberOfCoreRegisters()
      : codegen.GetNumberOfFloatingPointRegisters();
  // Index layout: [0, number_of_registers) are registers,
  // [number_of_registers, ...) are spill slots.
  GrowableArray<ArenaBitVector*> liveness_of_values(
      allocator, number_of_registers + number_of_spill_slots);

  // Allocate a bit vector per register. A live interval that has a register
  // allocated will populate the associated bit vector based on its live ranges.
  for (size_t i = 0; i < number_of_registers + number_of_spill_slots; ++i) {
    liveness_of_values.Add(new (allocator) ArenaBitVector(allocator, 0, true));
  }

  for (size_t i = 0, e = intervals.Size(); i < e; ++i) {
    // Walk every range of every sibling of this interval.
    for (AllRangesIterator it(intervals.Get(i)); !it.Done(); it.Advance()) {
      LiveInterval* current = it.CurrentInterval();
      HInstruction* defined_by = current->GetParent()->GetDefinedBy();
      if (current->GetParent()->HasSpillSlot()
           // Parameters have their own stack slot.
           && !(defined_by != nullptr && defined_by->IsParameterValue())) {
        // Spill slots are byte offsets; convert to a slot index and skip the
        // slots reserved for outgoing arguments.
        BitVector* liveness_of_spill_slot = liveness_of_values.Get(number_of_registers
            + current->GetParent()->GetSpillSlot() / kVRegSize
            - number_of_out_slots);
        for (size_t j = it.CurrentRange()->GetStart(); j < it.CurrentRange()->GetEnd(); ++j) {
          if (liveness_of_spill_slot->IsBitSet(j)) {
            if (log_fatal_on_failure) {
              std::ostringstream message;
              message << "Spill slot conflict at " << j;
              LOG(FATAL) << message.str();
            } else {
              return false;
            }
          } else {
            liveness_of_spill_slot->SetBit(j);
          }
        }
      }

      if (current->HasRegister()) {
        BitVector* liveness_of_register = liveness_of_values.Get(current->GetRegister());
        for (size_t j = it.CurrentRange()->GetStart(); j < it.CurrentRange()->GetEnd(); ++j) {
          if (liveness_of_register->IsBitSet(j)) {
            if (log_fatal_on_failure) {
              std::ostringstream message;
              message << "Register conflict at " << j << " ";
              if (defined_by != nullptr) {
                message << "(" << defined_by->DebugName() << ")";
              }
              message << "for ";
              if (processing_core_registers) {
                codegen.DumpCoreRegister(message, current->GetRegister());
              } else {
                codegen.DumpFloatingPointRegister(message, current->GetRegister());
              }
              LOG(FATAL) << message.str();
            } else {
              return false;
            }
          } else {
            liveness_of_register->SetBit(j);
          }
        }
      }
    }
  }
  return true;
}
512
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +0100513void RegisterAllocator::DumpInterval(std::ostream& stream, LiveInterval* interval) const {
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100514 interval->Dump(stream);
515 stream << ": ";
516 if (interval->HasRegister()) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100517 if (interval->IsFloatingPoint()) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +0100518 codegen_->DumpFloatingPointRegister(stream, interval->GetRegister());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100519 } else {
520 codegen_->DumpCoreRegister(stream, interval->GetRegister());
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100521 }
522 } else {
523 stream << "spilled";
524 }
525 stream << std::endl;
526}
527
Mingyao Yang296bd602014-10-06 16:47:28 -0700528void RegisterAllocator::DumpAllIntervals(std::ostream& stream) const {
529 stream << "inactive: " << std::endl;
530 for (size_t i = 0; i < inactive_.Size(); i ++) {
531 DumpInterval(stream, inactive_.Get(i));
532 }
533 stream << "active: " << std::endl;
534 for (size_t i = 0; i < active_.Size(); i ++) {
535 DumpInterval(stream, active_.Get(i));
536 }
537 stream << "unhandled: " << std::endl;
538 auto unhandled = (unhandled_ != nullptr) ?
539 unhandled_ : &unhandled_core_intervals_;
540 for (size_t i = 0; i < unhandled->Size(); i ++) {
541 DumpInterval(stream, unhandled->Get(i));
542 }
543 stream << "handled: " << std::endl;
544 for (size_t i = 0; i < handled_.Size(); i ++) {
545 DumpInterval(stream, handled_.Get(i));
546 }
547}
548
// By the book implementation of a linear scan register allocator.
// Repeatedly takes the lowest-starting unhandled interval, retires or
// deactivates intervals relative to its start position, then tries a free
// register before falling back to spilling. Pair (low/high) intervals get
// the high register assigned as low + 1.
void RegisterAllocator::LinearScan() {
  while (!unhandled_->IsEmpty()) {
    // (1) Remove interval with the lowest start position from unhandled.
    LiveInterval* current = unhandled_->Pop();
    DCHECK(!current->IsFixed() && !current->HasSpillSlot());
    DCHECK(unhandled_->IsEmpty() || unhandled_->Peek()->GetStart() >= current->GetStart());
    // A low interval is always immediately followed by its high sibling.
    DCHECK(!current->IsLowInterval() || unhandled_->Peek()->IsHighInterval());

    size_t position = current->GetStart();

    // Remember the inactive_ size here since the ones moved to inactive_ from
    // active_ below shouldn't need to be re-checked.
    size_t inactive_intervals_to_handle = inactive_.Size();

    // (2) Remove currently active intervals that are dead at this position.
    //     Move active intervals that have a lifetime hole at this position
    //     to inactive.
    for (size_t i = 0; i < active_.Size(); ++i) {
      LiveInterval* interval = active_.Get(i);
      if (interval->IsDeadAt(position)) {
        active_.Delete(interval);
        --i;  // Compensate for the removal so the next element is not skipped.
        handled_.Add(interval);
      } else if (!interval->Covers(position)) {
        active_.Delete(interval);
        --i;
        inactive_.Add(interval);
      }
    }

    // (3) Remove currently inactive intervals that are dead at this position.
    //     Move inactive intervals that cover this position to active.
    for (size_t i = 0; i < inactive_intervals_to_handle; ++i) {
      LiveInterval* interval = inactive_.Get(i);
      DCHECK(interval->GetStart() < position || interval->IsFixed());
      if (interval->IsDeadAt(position)) {
        inactive_.Delete(interval);
        --i;
        --inactive_intervals_to_handle;
        handled_.Add(interval);
      } else if (interval->Covers(position)) {
        inactive_.Delete(interval);
        --i;
        --inactive_intervals_to_handle;
        active_.Add(interval);
      }
    }

    if (current->IsSlowPathSafepoint()) {
      // Synthesized interval to record the maximum number of live registers
      // at safepoints. No need to allocate a register for it.
      if (processing_core_registers_) {
        maximum_number_of_live_core_registers_ =
            std::max(maximum_number_of_live_core_registers_, active_.Size());
      } else {
        maximum_number_of_live_fp_registers_ =
            std::max(maximum_number_of_live_fp_registers_, active_.Size());
      }
      DCHECK(unhandled_->IsEmpty() || unhandled_->Peek()->GetStart() > current->GetStart());
      continue;
    }

    if (current->IsHighInterval() && !current->GetLowInterval()->HasRegister()) {
      DCHECK(!current->HasRegister());
      // Allocating the low part was unsuccessful. The split interval for the
      // high part will be handled next (it is in the `unhandled_` list).
      continue;
    }

    // (4) Try to find an available register.
    bool success = TryAllocateFreeReg(current);

    // (5) If no register could be found, we need to spill.
    if (!success) {
      success = AllocateBlockedReg(current);
    }

    // (6) If the interval had a register allocated, add it to the list of active
    //     intervals.
    if (success) {
      active_.Add(current);
      // Pair intervals: the high half is always the register right above
      // the low half (see GetHighForLowRegister).
      if (current->HasHighInterval() && !current->GetHighInterval()->HasRegister()) {
        current->GetHighInterval()->SetRegister(GetHighForLowRegister(current->GetRegister()));
      }
    }
  }
}
637
// Find a free register. If multiple are found, pick the register that
// is free the longest.
// Returns true if a register was assigned to `current` (splitting it at the
// position where the register stops being free, if needed), false if the
// caller must fall back to AllocateBlockedReg.
bool RegisterAllocator::TryAllocateFreeReg(LiveInterval* current) {
  // Scratch array shared across calls; sized number_of_registers_.
  size_t* free_until = registers_array_;

  // First set all registers to be free.
  for (size_t i = 0; i < number_of_registers_; ++i) {
    free_until[i] = kMaxLifetimePosition;
  }

  // For each active interval, set its register to not free.
  for (size_t i = 0, e = active_.Size(); i < e; ++i) {
    LiveInterval* interval = active_.Get(i);
    DCHECK(interval->HasRegister());
    free_until[interval->GetRegister()] = 0;
  }

  // For each inactive interval, set its register to be free until
  // the next intersection with `current`.
  for (size_t i = 0, e = inactive_.Size(); i < e; ++i) {
    LiveInterval* inactive = inactive_.Get(i);
    // Temp/Slow-path-safepoint interval has no holes.
    DCHECK(!inactive->IsTemp() && !inactive->IsSlowPathSafepoint());
    if (!current->IsSplit() && !inactive->IsFixed()) {
      // Neither current nor inactive are fixed.
      // Thanks to SSA, a non-split interval starting in a hole of an
      // inactive interval should never intersect with that inactive interval.
      // Only if it's not fixed though, because fixed intervals don't come from SSA.
      DCHECK_EQ(inactive->FirstIntersectionWith(current), kNoLifetime);
      continue;
    }

    DCHECK(inactive->HasRegister());
    if (free_until[inactive->GetRegister()] == 0) {
      // Already used by some active interval. No need to intersect.
      continue;
    }
    size_t next_intersection = inactive->FirstIntersectionWith(current);
    if (next_intersection != kNoLifetime) {
      free_until[inactive->GetRegister()] =
          std::min(free_until[inactive->GetRegister()], next_intersection);
    }
  }

  int reg = kNoRegister;
  if (current->HasRegister()) {
    // Some instructions have a fixed register output.
    reg = current->GetRegister();
    if (free_until[reg] == 0) {
      // Only a high interval can arrive here with a register already set:
      // it was assigned when the matching low interval was allocated.
      DCHECK(current->IsHighInterval());
      // AllocateBlockedReg will spill the holder of the register.
      return false;
    }
  } else {
    DCHECK(!current->IsHighInterval());
    int hint = current->FindFirstRegisterHint(free_until);
    if (hint != kNoRegister) {
      DCHECK(!IsBlocked(hint));
      reg = hint;
    } else if (current->IsLowInterval()) {
      // Register pairs: look for two consecutive free registers.
      reg = FindAvailableRegisterPair(free_until, current->GetStart());
    } else {
      reg = FindAvailableRegister(free_until);
    }
  }

  DCHECK_NE(reg, kNoRegister);
  // If we could not find a register, we need to spill.
  if (free_until[reg] == 0) {
    return false;
  }

  // For a pair, the high register must be free as well.
  if (current->IsLowInterval() && free_until[GetHighForLowRegister(reg)] == 0) {
    return false;
  }

  current->SetRegister(reg);
  if (!current->IsDeadAt(free_until[reg])) {
    // If the register is only available for a subset of live ranges
    // covered by `current`, split `current` at the position where
    // the register is not available anymore.
    LiveInterval* split = Split(current, free_until[reg]);
    DCHECK(split != nullptr);
    AddSorted(unhandled_, split);
  }
  return true;
}
725
726bool RegisterAllocator::IsBlocked(int reg) const {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100727 return processing_core_registers_
728 ? blocked_core_registers_[reg]
729 : blocked_fp_registers_[reg];
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100730}
731
// Returns the low register of the unblocked (low, high) register pair whose
// registers are needed the latest according to `next_use`, or kNoRegister if
// no pair is available. `starting_at` is the start of the interval being
// allocated; a currently-picked pair already known to be unavailable at that
// position is replaced by any later candidate.
int RegisterAllocator::FindAvailableRegisterPair(size_t* next_use, size_t starting_at) const {
  int reg = kNoRegister;
  // Pick the register pair that is used the last.
  for (size_t i = 0; i < number_of_registers_; ++i) {
    if (IsBlocked(i)) continue;
    if (!IsLowRegister(i)) continue;
    int high_register = GetHighForLowRegister(i);
    if (IsBlocked(high_register)) continue;
    // High half of the best pair found so far (meaningless while reg == kNoRegister,
    // but only read after the short-circuit below guarantees reg is valid).
    int existing_high_register = GetHighForLowRegister(reg);
    if ((reg == kNoRegister) || (next_use[i] >= next_use[reg]
                                 && next_use[high_register] >= next_use[existing_high_register])) {
      reg = i;
      if (next_use[i] == kMaxLifetimePosition
          && next_use[high_register] == kMaxLifetimePosition) {
        // Both halves are never used again; cannot do better.
        break;
      }
    } else if (next_use[reg] <= starting_at || next_use[existing_high_register] <= starting_at) {
      // If one of the registers of the current pair is known to be unavailable
      // at the start of the interval, just unconditionally try a new one.
      reg = i;
    }
  }
  return reg;
}
756
757int RegisterAllocator::FindAvailableRegister(size_t* next_use) const {
Nicolas Geoffray41aedbb2015-01-14 10:49:16 +0000758 int reg = kNoRegister;
Nicolas Geoffray840e5462015-01-07 16:01:24 +0000759 // Pick the register that is used the last.
760 for (size_t i = 0; i < number_of_registers_; ++i) {
761 if (IsBlocked(i)) continue;
Nicolas Geoffray41aedbb2015-01-14 10:49:16 +0000762 if (reg == kNoRegister || next_use[i] > next_use[reg]) {
Nicolas Geoffray840e5462015-01-07 16:01:24 +0000763 reg = i;
764 if (next_use[i] == kMaxLifetimePosition) break;
765 }
766 }
767 return reg;
768}
769
Nicolas Geoffray41aedbb2015-01-14 10:49:16 +0000770bool RegisterAllocator::TrySplitNonPairIntervalAt(size_t position,
771 size_t first_register_use,
772 size_t* next_use) {
773 for (size_t i = 0, e = active_.Size(); i < e; ++i) {
774 LiveInterval* active = active_.Get(i);
775 DCHECK(active->HasRegister());
776 // Split the first interval found.
777 if (first_register_use <= next_use[active->GetRegister()]
778 && !active->IsLowInterval()
779 && !active->IsHighInterval()) {
780 LiveInterval* split = Split(active, position);
781 active_.DeleteAt(i);
782 if (split != active) {
783 handled_.Add(active);
784 }
785 AddSorted(unhandled_, split);
786 return true;
787 }
788 }
789 return false;
790}
791
// Find the register that is used the last, and spill the interval
// that holds it. If the first use of `current` is after that register
// we spill `current` instead.
// Returns true if `current` got a register (evicting the active/inactive
// intervals that held it), false if `current` itself was spilled/split.
bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
  size_t first_register_use = current->FirstRegisterUse();
  if (first_register_use == kNoLifetime) {
    // `current` is never used in a register: give it a spill slot for its
    // whole lifetime and do not allocate a register at all.
    AllocateSpillSlotFor(current);
    return false;
  }

  // First set all registers as not being used.
  size_t* next_use = registers_array_;
  for (size_t i = 0; i < number_of_registers_; ++i) {
    next_use[i] = kMaxLifetimePosition;
  }

  // For each active interval, find the next use of its register after the
  // start of current.
  for (size_t i = 0, e = active_.Size(); i < e; ++i) {
    LiveInterval* active = active_.Get(i);
    DCHECK(active->HasRegister());
    if (active->IsFixed()) {
      // A fixed interval can never be evicted: treat its register as
      // needed immediately.
      next_use[active->GetRegister()] = current->GetStart();
    } else {
      size_t use = active->FirstRegisterUseAfter(current->GetStart());
      if (use != kNoLifetime) {
        next_use[active->GetRegister()] = use;
      }
    }
  }

  // For each inactive interval, find the next use of its register after the
  // start of current.
  for (size_t i = 0, e = inactive_.Size(); i < e; ++i) {
    LiveInterval* inactive = inactive_.Get(i);
    // Temp/Slow-path-safepoint interval has no holes.
    DCHECK(!inactive->IsTemp() && !inactive->IsSlowPathSafepoint());
    if (!current->IsSplit() && !inactive->IsFixed()) {
      // Neither current nor inactive are fixed.
      // Thanks to SSA, a non-split interval starting in a hole of an
      // inactive interval should never intersect with that inactive interval.
      // Only if it's not fixed though, because fixed intervals don't come from SSA.
      DCHECK_EQ(inactive->FirstIntersectionWith(current), kNoLifetime);
      continue;
    }
    DCHECK(inactive->HasRegister());
    size_t next_intersection = inactive->FirstIntersectionWith(current);
    if (next_intersection != kNoLifetime) {
      if (inactive->IsFixed()) {
        next_use[inactive->GetRegister()] =
            std::min(next_intersection, next_use[inactive->GetRegister()]);
      } else {
        size_t use = inactive->FirstRegisterUseAfter(current->GetStart());
        if (use != kNoLifetime) {
          next_use[inactive->GetRegister()] = std::min(use, next_use[inactive->GetRegister()]);
        }
      }
    }
  }

  int reg = kNoRegister;
  bool should_spill = false;
  if (current->HasRegister()) {
    // Only a high interval arrives here with a register already assigned
    // (set when its low half was allocated).
    DCHECK(current->IsHighInterval());
    reg = current->GetRegister();
    // When allocating the low part, we made sure the high register was available.
    DCHECK_LT(first_register_use, next_use[reg]);
  } else if (current->IsLowInterval()) {
    reg = FindAvailableRegisterPair(next_use, current->GetStart());
    // We should spill if both registers are not available.
    should_spill = (first_register_use >= next_use[reg])
        || (first_register_use >= next_use[GetHighForLowRegister(reg)]);
  } else {
    DCHECK(!current->IsHighInterval());
    reg = FindAvailableRegister(next_use);
    should_spill = (first_register_use >= next_use[reg]);
  }

  DCHECK_NE(reg, kNoRegister);
  if (should_spill) {
    DCHECK(!current->IsHighInterval());
    // `current` starts right at the instruction that needs the register
    // (lifetime positions are off by one from uses).
    bool is_allocation_at_use_site = (current->GetStart() == (first_register_use - 1));
    if (is_allocation_at_use_site
        && TrySplitNonPairIntervalAt(current->GetStart(), first_register_use, next_use)) {
      // If we're allocating a register for `current` because the instruction at
      // that position requires it, but we think we should spill, then there are
      // non-pair intervals blocking the allocation. We split the first
      // interval found, and put ourselves first in the `unhandled_` list.
      AddSorted(unhandled_, current);
    } else {
      // If the first use of that instruction is after the last use of the found
      // register, we split this interval just before its first register use.
      AllocateSpillSlotFor(current);
      LiveInterval* split = Split(current, first_register_use - 1);
      DCHECK_NE(current, split) << "There is not enough registers available for "
        << split->GetParent()->GetDefinedBy()->DebugName() << " "
        << split->GetParent()->GetDefinedBy()->GetId()
        << " at " << first_register_use - 1;
      AddSorted(unhandled_, split);
    }
    return false;
  } else {
    // Use this register and spill the active and inactive intervals that
    // hold that register.
    current->SetRegister(reg);

    for (size_t i = 0, e = active_.Size(); i < e; ++i) {
      LiveInterval* active = active_.Get(i);
      if (active->GetRegister() == reg) {
        DCHECK(!active->IsFixed());
        LiveInterval* split = Split(active, current->GetStart());
        active_.DeleteAt(i);
        if (split != active) {
          handled_.Add(active);
        }
        AddSorted(unhandled_, split);

        if (active->IsLowInterval() || active->IsHighInterval()) {
          LiveInterval* other_half = active->IsLowInterval()
              ? active->GetHighInterval()
              : active->GetLowInterval();
          // We also need to remove the other half from the list of actives.
          bool found = false;
          for (size_t j = 0; j < active_.Size(); ++j) {
            if (active_.Get(j) == other_half) {
              found = true;
              active_.DeleteAt(j);
              handled_.Add(other_half);
              break;
            }
          }
          DCHECK(found);
        }
        // At most one active interval can hold `reg`.
        break;
      }
    }

    for (size_t i = 0, e = inactive_.Size(); i < e; ++i) {
      LiveInterval* inactive = inactive_.Get(i);
      if (inactive->GetRegister() == reg) {
        if (!current->IsSplit() && !inactive->IsFixed()) {
          // Neither current nor inactive are fixed.
          // Thanks to SSA, a non-split interval starting in a hole of an
          // inactive interval should never intersect with that inactive interval.
          // Only if it's not fixed though, because fixed intervals don't come from SSA.
          DCHECK_EQ(inactive->FirstIntersectionWith(current), kNoLifetime);
          continue;
        }
        size_t next_intersection = inactive->FirstIntersectionWith(current);
        if (next_intersection != kNoLifetime) {
          if (inactive->IsFixed()) {
            // Cannot evict a fixed interval: split `current` before the clash.
            LiveInterval* split = Split(current, next_intersection);
            DCHECK_NE(split, current);
            AddSorted(unhandled_, split);
          } else {
            // Split at the start of `current`, which will lead to splitting
            // at the end of the lifetime hole of `inactive`.
            LiveInterval* split = Split(inactive, current->GetStart());
            // If it's inactive, it must start before the current interval.
            DCHECK_NE(split, inactive);
            inactive_.DeleteAt(i);
            // Keep the scan consistent after the in-place deletion.
            --i;
            --e;
            handled_.Add(inactive);
            AddSorted(unhandled_, split);

            if (inactive->IsLowInterval() || inactive->IsHighInterval()) {
              LiveInterval* other_half = inactive->IsLowInterval()
                  ? inactive->GetHighInterval()
                  : inactive->GetLowInterval();

              // We also need to remove the other half from the list of inactives.
              bool found = false;
              for (size_t j = 0; j < inactive_.Size(); ++j) {
                if (inactive_.Get(j) == other_half) {
                  found = true;
                  inactive_.DeleteAt(j);
                  --e;
                  handled_.Add(other_half);
                  break;
                }
              }
              DCHECK(found);
            }
          }
        }
      }
    }

    return true;
  }
}
984
// Inserts `interval` into `array` keeping it sorted by decreasing start
// position (the allocator pops from the end). If `interval` is half of a
// register pair, its sibling half is inserted adjacently so that the low
// half is always processed before the high half.
void RegisterAllocator::AddSorted(GrowableArray<LiveInterval*>* array, LiveInterval* interval) {
  DCHECK(!interval->IsFixed() && !interval->HasSpillSlot());
  size_t insert_at = 0;
  // Scan from the end (smallest starts) towards the front.
  for (size_t i = array->Size(); i > 0; --i) {
    LiveInterval* current = array->Get(i - 1);
    // High intervals must be processed right after their low equivalent.
    if (current->StartsAfter(interval) && !current->IsHighInterval()) {
      insert_at = i;
      break;
    } else if ((current->GetStart() == interval->GetStart()) && current->IsSlowPathSafepoint()) {
      // Ensure the slow path interval is the last to be processed at its location: we want the
      // interval to know all live registers at this location.
      DCHECK(i == 1 || array->Get(i - 2)->StartsAfter(current));
      insert_at = i;
      break;
    }
  }

  array->InsertAt(insert_at, interval);
  // Insert the high interval before the low, to ensure the low is processed before.
  if (interval->HasHighInterval()) {
    array->InsertAt(insert_at, interval->GetHighInterval());
  } else if (interval->HasLowInterval()) {
    array->InsertAt(insert_at + 1, interval->GetLowInterval());
  }
}
1011
1012LiveInterval* RegisterAllocator::Split(LiveInterval* interval, size_t position) {
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001013 DCHECK_GE(position, interval->GetStart());
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001014 DCHECK(!interval->IsDeadAt(position));
1015 if (position == interval->GetStart()) {
1016 // Spill slot will be allocated when handling `interval` again.
1017 interval->ClearRegister();
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001018 if (interval->HasHighInterval()) {
1019 interval->GetHighInterval()->ClearRegister();
1020 } else if (interval->HasLowInterval()) {
1021 interval->GetLowInterval()->ClearRegister();
1022 }
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001023 return interval;
1024 } else {
1025 LiveInterval* new_interval = interval->SplitAt(position);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001026 if (interval->HasHighInterval()) {
1027 LiveInterval* high = interval->GetHighInterval()->SplitAt(position);
1028 new_interval->SetHighInterval(high);
1029 high->SetLowInterval(new_interval);
1030 } else if (interval->HasLowInterval()) {
1031 LiveInterval* low = interval->GetLowInterval()->SplitAt(position);
1032 new_interval->SetLowInterval(low);
1033 low->SetHighInterval(new_interval);
1034 }
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001035 return new_interval;
1036 }
1037}
1038
// Assigns a spill slot to the parent (whole lifetime) of `interval`, unless
// one is unnecessary: high halves of pairs share the low half's slot,
// parameters use their caller-allocated stack slot, and constants are
// rematerializable. Slots are reused once the interval that last occupied
// them has ended. `spill_slots_` records, per slot, the end position of its
// current occupant.
void RegisterAllocator::AllocateSpillSlotFor(LiveInterval* interval) {
  if (interval->IsHighInterval()) {
    // The low interval will contain the spill slot.
    return;
  }

  LiveInterval* parent = interval->GetParent();

  // An instruction gets a spill slot for its entire lifetime. If the parent
  // of this interval already has a spill slot, there is nothing to do.
  if (parent->HasSpillSlot()) {
    return;
  }

  HInstruction* defined_by = parent->GetDefinedBy();
  if (defined_by->IsParameterValue()) {
    // Parameters have their own stack slot.
    parent->SetSpillSlot(codegen_->GetStackSlotOfParameter(defined_by->AsParameterValue()));
    return;
  }

  if (defined_by->IsConstant()) {
    // Constants don't need a spill slot.
    return;
  }

  // The slot must stay reserved until the last sibling of this value dies.
  LiveInterval* last_sibling = interval;
  while (last_sibling->GetNextSibling() != nullptr) {
    last_sibling = last_sibling->GetNextSibling();
  }
  size_t end = last_sibling->GetEnd();

  // Find an available spill slot.
  size_t slot = 0;
  for (size_t e = spill_slots_.Size(); slot < e; ++slot) {
    // We check if it is less rather than less or equal because the parallel move
    // resolver does not work when a single spill slot needs to be exchanged with
    // a double spill slot. The strict comparison avoids needing to exchange these
    // locations at the same lifetime position.
    if (spill_slots_.Get(slot) < parent->GetStart()
        && (slot == (e - 1) || spill_slots_.Get(slot + 1) < parent->GetStart())) {
      break;
    }
  }

  if (parent->NeedsTwoSpillSlots()) {
    // Reserve two consecutive slots, growing the array as needed.
    if (slot == spill_slots_.Size()) {
      // We need a new spill slot.
      spill_slots_.Add(end);
      spill_slots_.Add(end);
    } else if (slot == spill_slots_.Size() - 1) {
      spill_slots_.Put(slot, end);
      spill_slots_.Add(end);
    } else {
      spill_slots_.Put(slot, end);
      spill_slots_.Put(slot + 1, end);
    }
  } else {
    if (slot == spill_slots_.Size()) {
      // We need a new spill slot.
      spill_slots_.Add(end);
    } else {
      spill_slots_.Put(slot, end);
    }
  }

  // Skip the slots reserved for outgoing arguments; stored as a byte offset.
  parent->SetSpillSlot((slot + reserved_out_slots_) * kVRegSize);
}
1107
Nicolas Geoffray2a877f32014-09-10 10:49:34 +01001108static bool IsValidDestination(Location destination) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001109 return destination.IsRegister()
Nicolas Geoffray41aedbb2015-01-14 10:49:16 +00001110 || destination.IsRegisterPair()
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001111 || destination.IsFpuRegister()
Nicolas Geoffray840e5462015-01-07 16:01:24 +00001112 || destination.IsFpuRegisterPair()
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001113 || destination.IsStackSlot()
1114 || destination.IsDoubleStackSlot();
Nicolas Geoffray2a877f32014-09-10 10:49:34 +01001115}
1116
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001117void RegisterAllocator::AddInputMoveFor(HInstruction* user,
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001118 Location source,
1119 Location destination) const {
1120 if (source.Equals(destination)) return;
1121
Roland Levillain476df552014-10-09 17:51:36 +01001122 DCHECK(!user->IsPhi());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001123
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001124 HInstruction* previous = user->GetPrevious();
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001125 HParallelMove* move = nullptr;
1126 if (previous == nullptr
Roland Levillain476df552014-10-09 17:51:36 +01001127 || !previous->IsParallelMove()
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01001128 || previous->GetLifetimePosition() < user->GetLifetimePosition()) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001129 move = new (allocator_) HParallelMove(allocator_);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01001130 move->SetLifetimePosition(user->GetLifetimePosition());
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001131 user->GetBlock()->InsertInstructionBefore(move, user);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001132 } else {
1133 move = previous->AsParallelMove();
1134 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01001135 DCHECK_EQ(move->GetLifetimePosition(), user->GetLifetimePosition());
Nicolas Geoffray42d1f5f2015-01-16 09:14:18 +00001136 move->AddMove(source, destination, nullptr);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001137}
1138
// Lifetime positions are even at the start of an instruction.
static bool IsInstructionStart(size_t position) {
  return (position % 2) == 0;
}
1142
// Lifetime positions are odd at the end of an instruction.
static bool IsInstructionEnd(size_t position) {
  return (position % 2) == 1;
}
1146
// Records a move from `source` to `destination` (for `instruction`) at the
// given lifetime `position`, placing it before or after the instruction at
// that position as appropriate, and reusing an existing parallel move at the
// exact same position when possible.
void RegisterAllocator::InsertParallelMoveAt(size_t position,
                                             HInstruction* instruction,
                                             Location source,
                                             Location destination) const {
  DCHECK(IsValidDestination(destination)) << destination;
  if (source.Equals(destination)) return;

  HInstruction* at = liveness_.GetInstructionFromPosition(position / 2);
  HParallelMove* move;
  if (at == nullptr) {
    if (IsInstructionStart(position)) {
      // Block boundary: don't do anything, the connection of split siblings will handle it.
      return;
    } else {
      // Move must happen before the first instruction of the block.
      at = liveness_.GetInstructionFromPosition((position + 1) / 2);
      // Note that parallel moves may have already been inserted, so we explicitly
      // ask for the first instruction of the block: `GetInstructionFromPosition` does
      // not contain the moves.
      at = at->GetBlock()->GetFirstInstruction();
      if (at->GetLifetimePosition() != position) {
        DCHECK_GT(at->GetLifetimePosition(), position);
        move = new (allocator_) HParallelMove(allocator_);
        move->SetLifetimePosition(position);
        at->GetBlock()->InsertInstructionBefore(move, at);
      } else {
        // A move was already inserted at this position; reuse it.
        DCHECK(at->IsParallelMove());
        move = at->AsParallelMove();
      }
    }
  } else if (IsInstructionEnd(position)) {
    // Move must happen after the instruction.
    DCHECK(!at->IsControlFlow());
    move = at->GetNext()->AsParallelMove();
    // This is a parallel move for connecting siblings in a same block. We need to
    // differentiate it with moves for connecting blocks, and input moves.
    if (move == nullptr || move->GetLifetimePosition() > position) {
      move = new (allocator_) HParallelMove(allocator_);
      move->SetLifetimePosition(position);
      at->GetBlock()->InsertInstructionBefore(move, at->GetNext());
    }
  } else {
    // Move must happen before the instruction.
    HInstruction* previous = at->GetPrevious();
    if (previous == nullptr
        || !previous->IsParallelMove()
        || previous->GetLifetimePosition() != position) {
      // If the previous is a parallel move, then its position must be lower
      // than the given `position`: it was added just after the non-parallel
      // move instruction that precedes `instruction`.
      DCHECK(previous == nullptr
             || !previous->IsParallelMove()
             || previous->GetLifetimePosition() < position);
      move = new (allocator_) HParallelMove(allocator_);
      move->SetLifetimePosition(position);
      at->GetBlock()->InsertInstructionBefore(move, at);
    } else {
      move = previous->AsParallelMove();
    }
  }
  DCHECK_EQ(move->GetLifetimePosition(), position);
  move->AddMove(source, destination, instruction);
}
1210
// Records a move from `source` to `destination` (for `instruction`) at the
// exit of `block`, i.e. just before its last instruction. Only valid for
// blocks with a single successor (no critical edges).
void RegisterAllocator::InsertParallelMoveAtExitOf(HBasicBlock* block,
                                                   HInstruction* instruction,
                                                   Location source,
                                                   Location destination) const {
  DCHECK(IsValidDestination(destination)) << destination;
  if (source.Equals(destination)) return;

  DCHECK_EQ(block->GetSuccessors().Size(), 1u);
  HInstruction* last = block->GetLastInstruction();
  // We insert moves at exit for phi predecessors and connecting blocks.
  // A block ending with an if cannot branch to a block with phis because
  // we do not allow critical edges. It can also not connect
  // a split interval between two blocks: the move has to happen in the successor.
  DCHECK(!last->IsIf());
  HInstruction* previous = last->GetPrevious();
  HParallelMove* move;
  // This is a parallel move for connecting blocks. We need to differentiate
  // it with moves for connecting siblings in a same block, and output moves.
  size_t position = last->GetLifetimePosition();
  if (previous == nullptr || !previous->IsParallelMove()
      || previous->AsParallelMove()->GetLifetimePosition() != position) {
    move = new (allocator_) HParallelMove(allocator_);
    move->SetLifetimePosition(position);
    block->InsertInstructionBefore(move, last);
  } else {
    // Reuse the block-connecting move already placed before `last`.
    move = previous->AsParallelMove();
  }
  move->AddMove(source, destination, instruction);
}
1240
1241void RegisterAllocator::InsertParallelMoveAtEntryOf(HBasicBlock* block,
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001242 HInstruction* instruction,
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001243 Location source,
1244 Location destination) const {
Nicolas Geoffray41aedbb2015-01-14 10:49:16 +00001245 DCHECK(IsValidDestination(destination)) << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001246 if (source.Equals(destination)) return;
1247
1248 HInstruction* first = block->GetFirstInstruction();
1249 HParallelMove* move = first->AsParallelMove();
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001250 // This is a parallel move for connecting blocks. We need to differentiate
1251 // it with moves for connecting siblings in a same block, and input moves.
1252 if (move == nullptr || move->GetLifetimePosition() != block->GetLifetimeStart()) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001253 move = new (allocator_) HParallelMove(allocator_);
1254 move->SetLifetimePosition(block->GetLifetimeStart());
1255 block->InsertInstructionBefore(move, first);
1256 }
Nicolas Geoffray42d1f5f2015-01-16 09:14:18 +00001257 move->AddMove(source, destination, instruction);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001258}
1259
1260void RegisterAllocator::InsertMoveAfter(HInstruction* instruction,
1261 Location source,
1262 Location destination) const {
Nicolas Geoffray41aedbb2015-01-14 10:49:16 +00001263 DCHECK(IsValidDestination(destination)) << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001264 if (source.Equals(destination)) return;
1265
Roland Levillain476df552014-10-09 17:51:36 +01001266 if (instruction->IsPhi()) {
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001267 InsertParallelMoveAtEntryOf(instruction->GetBlock(), instruction, source, destination);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001268 return;
1269 }
1270
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001271 size_t position = instruction->GetLifetimePosition() + 1;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001272 HParallelMove* move = instruction->GetNext()->AsParallelMove();
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001273 // This is a parallel move for moving the output of an instruction. We need
1274 // to differentiate with input moves, moves for connecting siblings in a
1275 // and moves for connecting blocks.
1276 if (move == nullptr || move->GetLifetimePosition() != position) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001277 move = new (allocator_) HParallelMove(allocator_);
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01001278 move->SetLifetimePosition(position);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001279 instruction->GetBlock()->InsertInstructionBefore(move, instruction->GetNext());
1280 }
Nicolas Geoffray42d1f5f2015-01-16 09:14:18 +00001281 move->AddMove(source, destination, instruction);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001282}
1283
// Materializes the allocation decisions for `interval` and all of its split
// siblings: updates the recorded location of every use, inserts moves between
// adjacent siblings allocated to different locations, and records live
// register/stack information at each safepoint the siblings cover.
void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
  LiveInterval* current = interval;
  if (current->HasSpillSlot() && current->HasRegister()) {
    // We spill eagerly, so move must be at definition.
    InsertMoveAfter(interval->GetDefinedBy(),
                    interval->ToLocation(),
                    interval->NeedsTwoSpillSlots()
                        ? Location::DoubleStackSlot(interval->GetParent()->GetSpillSlot())
                        : Location::StackSlot(interval->GetParent()->GetSpillSlot()));
  }
  // The use list is threaded across all siblings: each loop iteration below
  // consumes exactly the uses covered by the current sibling.
  UsePosition* use = current->GetFirstUse();

  // Walk over all siblings, updating locations of use positions, and
  // connecting them when they are adjacent.
  do {
    Location source = current->ToLocation();

    // Walk over all uses covered by this interval, and update the location
    // information.
    while (use != nullptr && use->GetPosition() <= current->GetEnd()) {
      LocationSummary* locations = use->GetUser()->GetLocations();
      if (use->GetIsEnvironment()) {
        // Environment (deopt/debug) uses accept whatever location the value
        // currently has.
        locations->SetEnvironmentAt(use->GetInputIndex(), source);
      } else {
        Location expected_location = locations->InAt(use->GetInputIndex());
        // The expected (actual) location may be invalid in case the input is unused. Currently
        // this only happens for intrinsics.
        if (expected_location.IsValid()) {
          if (expected_location.IsUnallocated()) {
            // The user had no fixed requirement: record the actual location.
            locations->SetInAt(use->GetInputIndex(), source);
          } else if (!expected_location.IsConstant()) {
            // Fixed requirement that differs from where the value lives:
            // emit an input move. Constants need no move.
            AddInputMoveFor(use->GetUser(), source, expected_location);
          }
        } else {
          DCHECK(use->GetUser()->IsInvoke());
          DCHECK(use->GetUser()->AsInvoke()->GetIntrinsic() != Intrinsics::kNone);
        }
      }
      use = use->GetNext();
    }

    // If the next interval starts just after this one, and has a register,
    // insert a move.
    LiveInterval* next_sibling = current->GetNextSibling();
    if (next_sibling != nullptr
        && next_sibling->HasRegister()
        && current->GetEnd() == next_sibling->GetStart()) {
      Location destination = next_sibling->ToLocation();
      InsertParallelMoveAt(current->GetEnd(), interval->GetDefinedBy(), source, destination);
    }

    // At each safepoint, we record stack and register information.
    for (size_t i = 0, e = safepoints_.Size(); i < e; ++i) {
      HInstruction* safepoint = safepoints_.Get(i);
      size_t position = safepoint->GetLifetimePosition();
      LocationSummary* locations = safepoint->GetLocations();
      if (!current->Covers(position)) {
        // This sibling is not live at that safepoint; skip it.
        continue;
      }
      if (interval->GetStart() == position) {
        // The safepoint is for this instruction, so the location of the instruction
        // does not need to be saved.
        continue;
      }

      // A spilled reference must be visible to GC via its stack slot bit.
      if ((current->GetType() == Primitive::kPrimNot) && current->GetParent()->HasSpillSlot()) {
        locations->SetStackBit(current->GetParent()->GetSpillSlot() / kVRegSize);
      }

      switch (source.GetKind()) {
        case Location::kRegister: {
          locations->AddLiveRegister(source);
          DCHECK_LE(locations->GetNumberOfLiveRegisters(),
                    maximum_number_of_live_core_registers_ +
                    maximum_number_of_live_fp_registers_);
          if (current->GetType() == Primitive::kPrimNot) {
            // References held in a register get a register bit for GC.
            locations->SetRegisterBit(source.reg());
          }
          break;
        }
        case Location::kFpuRegister: {
          locations->AddLiveRegister(source);
          break;
        }

        case Location::kRegisterPair:
        case Location::kFpuRegisterPair: {
          // Both halves of a pair must be preserved across the safepoint.
          locations->AddLiveRegister(source.ToLow());
          locations->AddLiveRegister(source.ToHigh());
          break;
        }
        case Location::kStackSlot:  // Fall-through
        case Location::kDoubleStackSlot:  // Fall-through
        case Location::kConstant: {
          // Nothing to do.
          break;
        }
        default: {
          LOG(FATAL) << "Unexpected location for object";
        }
      }
    }
    current = next_sibling;
  } while (current != nullptr);
  // All uses must have been consumed by the sibling walk above.
  DCHECK(use == nullptr);
}
1390
1391void RegisterAllocator::ConnectSplitSiblings(LiveInterval* interval,
1392 HBasicBlock* from,
1393 HBasicBlock* to) const {
1394 if (interval->GetNextSibling() == nullptr) {
1395 // Nothing to connect. The whole range was allocated to the same location.
1396 return;
1397 }
1398
Nicolas Geoffray46fbaab2014-11-26 18:30:23 +00001399 // Intervals end at the lifetime end of a block. The decrement by one
1400 // ensures the `Cover` call will return true.
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001401 size_t from_position = from->GetLifetimeEnd() - 1;
Nicolas Geoffray46fbaab2014-11-26 18:30:23 +00001402 size_t to_position = to->GetLifetimeStart();
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001403
1404 LiveInterval* destination = nullptr;
1405 LiveInterval* source = nullptr;
1406
1407 LiveInterval* current = interval;
1408
1409 // Check the intervals that cover `from` and `to`.
1410 while ((current != nullptr) && (source == nullptr || destination == nullptr)) {
1411 if (current->Covers(from_position)) {
1412 DCHECK(source == nullptr);
1413 source = current;
1414 }
1415 if (current->Covers(to_position)) {
1416 DCHECK(destination == nullptr);
1417 destination = current;
1418 }
1419
1420 current = current->GetNextSibling();
1421 }
1422
1423 if (destination == source) {
1424 // Interval was not split.
1425 return;
1426 }
1427
Nicolas Geoffray8ddb00c2014-09-29 12:00:40 +01001428 DCHECK(destination != nullptr && source != nullptr);
1429
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001430 if (!destination->HasRegister()) {
1431 // Values are eagerly spilled. Spill slot already contains appropriate value.
1432 return;
1433 }
1434
1435 // If `from` has only one successor, we can put the moves at the exit of it. Otherwise
1436 // we need to put the moves at the entry of `to`.
1437 if (from->GetSuccessors().Size() == 1) {
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001438 InsertParallelMoveAtExitOf(from,
1439 interval->GetParent()->GetDefinedBy(),
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001440 source->ToLocation(),
1441 destination->ToLocation());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001442 } else {
1443 DCHECK_EQ(to->GetPredecessors().Size(), 1u);
Nicolas Geoffray740475d2014-09-29 10:33:25 +01001444 InsertParallelMoveAtEntryOf(to,
1445 interval->GetParent()->GetDefinedBy(),
Nicolas Geoffray01ef3452014-10-01 11:32:17 +01001446 source->ToLocation(),
1447 destination->ToLocation());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001448 }
1449}
1450
// Final phase of register allocation: fixes the frame size, writes the
// computed locations back into each instruction's LocationSummary, inserts
// the moves connecting split siblings (within and across blocks), resolves
// phi inputs, and assigns temp locations.
void RegisterAllocator::Resolve() {
  // The frame must be sized before locations are finalized, since parameter
  // and spill slots below are offset by the frame size.
  codegen_->ComputeFrameSize(
      spill_slots_.Size(), maximum_number_of_live_core_registers_,
      maximum_number_of_live_fp_registers_, reserved_out_slots_);

  // Adjust the Out Location of instructions.
  // TODO: Use pointers of Location inside LiveInterval to avoid doing another iteration.
  for (size_t i = 0, e = liveness_.GetNumberOfSsaValues(); i < e; ++i) {
    HInstruction* instruction = liveness_.GetInstructionFromSsaIndex(i);
    LiveInterval* current = instruction->GetLiveInterval();
    LocationSummary* locations = instruction->GetLocations();
    Location location = locations->Out();
    if (instruction->IsParameterValue()) {
      // Now that we know the frame size, adjust the parameter's location.
      // Parameters live in the caller's frame, above this method's frame.
      if (location.IsStackSlot()) {
        location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
        current->SetSpillSlot(location.GetStackIndex());
        locations->UpdateOut(location);
      } else if (location.IsDoubleStackSlot()) {
        location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
        current->SetSpillSlot(location.GetStackIndex());
        locations->UpdateOut(location);
      } else if (current->HasSpillSlot()) {
        current->SetSpillSlot(current->GetSpillSlot() + codegen_->GetFrameSize());
      }
    }

    Location source = current->ToLocation();

    if (location.IsUnallocated()) {
      if (location.GetPolicy() == Location::kSameAsFirstInput) {
        // The output shares its location with input 0; record it there too
        // (or check agreement when input 0 was already fixed).
        if (locations->InAt(0).IsUnallocated()) {
          locations->SetInAt(0, source);
        } else {
          DCHECK(locations->InAt(0).Equals(source));
        }
      }
      locations->SetOut(source);
    } else {
      // A fixed output location must match what the allocator chose.
      DCHECK(source.Equals(location));
    }
  }

  // Connect siblings.
  for (size_t i = 0, e = liveness_.GetNumberOfSsaValues(); i < e; ++i) {
    HInstruction* instruction = liveness_.GetInstructionFromSsaIndex(i);
    ConnectSiblings(instruction->GetLiveInterval());
  }

  // Resolve non-linear control flow across branches. Order does not matter.
  // For every value live into a block, connect the siblings across each
  // predecessor edge.
  for (HLinearOrderIterator it(liveness_); !it.Done(); it.Advance()) {
    HBasicBlock* block = it.Current();
    BitVector* live = liveness_.GetLiveInSet(*block);
    for (uint32_t idx : live->Indexes()) {
      HInstruction* current = liveness_.GetInstructionFromSsaIndex(idx);
      LiveInterval* interval = current->GetLiveInterval();
      for (size_t i = 0, e = block->GetPredecessors().Size(); i < e; ++i) {
        ConnectSplitSiblings(interval, block->GetPredecessors().Get(i), block);
      }
    }
  }

  // Resolve phi inputs. Order does not matter.
  for (HLinearOrderIterator it(liveness_); !it.Done(); it.Advance()) {
    HBasicBlock* current = it.Current();
    for (HInstructionIterator inst_it(current->GetPhis()); !inst_it.Done(); inst_it.Advance()) {
      HInstruction* phi = inst_it.Current();
      for (size_t i = 0, e = current->GetPredecessors().Size(); i < e; ++i) {
        HBasicBlock* predecessor = current->GetPredecessors().Get(i);
        // Critical edges must have been split: a phi's predecessor has a
        // single successor, so the move can go at the predecessor's exit.
        DCHECK_EQ(predecessor->GetSuccessors().Size(), 1u);
        HInstruction* input = phi->InputAt(i);
        // The input's location at the end of the predecessor feeds the phi.
        Location source = input->GetLiveInterval()->GetLocationAt(
            predecessor->GetLifetimeEnd() - 1);
        Location destination = phi->GetLiveInterval()->ToLocation();
        InsertParallelMoveAtExitOf(predecessor, nullptr, source, destination);
      }
    }
  }

  // Assign temp locations.
  // `temp_index` restarts at 0 each time we move on to a new using
  // instruction, since temps are numbered per instruction.
  HInstruction* current = nullptr;
  size_t temp_index = 0;
  for (size_t i = 0; i < temp_intervals_.Size(); ++i) {
    LiveInterval* temp = temp_intervals_.Get(i);
    if (temp->IsHighInterval()) {
      // High intervals can be skipped, they are already handled by the low interval.
      continue;
    }
    HInstruction* at = liveness_.GetTempUser(temp);
    if (at != current) {
      temp_index = 0;
      current = at;
    }
    LocationSummary* locations = at->GetLocations();
    switch (temp->GetType()) {
      case Primitive::kPrimInt:
        locations->SetTempAt(
            temp_index++, Location::RegisterLocation(temp->GetRegister()));
        break;

      case Primitive::kPrimDouble:
        // On targets where a double needs two registers, pair the low
        // interval's register with its high interval's register.
        if (codegen_->NeedsTwoRegisters(Primitive::kPrimDouble)) {
          Location location = Location::FpuRegisterPairLocation(
              temp->GetRegister(), temp->GetHighInterval()->GetRegister());
          locations->SetTempAt(temp_index++, location);
        } else {
          locations->SetTempAt(
              temp_index++, Location::FpuRegisterLocation(temp->GetRegister()));
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type for temporary location "
                   << temp->GetType();
    }
  }
}
1568
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01001569} // namespace art