/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "register_allocator.h"

#include <iostream>
#include <sstream>

#include "base/bit_vector-inl.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "code_generator.h"
#include "register_allocator_graph_color.h"
#include "register_allocator_linear_scan.h"
#include "ssa_liveness_analysis.h"

namespace art {

RegisterAllocator::RegisterAllocator(ScopedArenaAllocator* allocator,
                                     CodeGenerator* codegen,
                                     const SsaLivenessAnalysis& liveness)
    : allocator_(allocator),
      codegen_(codegen),
      liveness_(liveness) {}

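// Illustrative use of this factory (a sketch, not code from this file; the real
// call site lives in the optimizing compiler driver and may differ):
//
//   std::unique_ptr<RegisterAllocator> register_allocator =
//       RegisterAllocator::Create(&allocator, codegen, liveness, kRegisterAllocatorLinearScan);
//   register_allocator->AllocateRegisters();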
std::unique_ptr<RegisterAllocator> RegisterAllocator::Create(ScopedArenaAllocator* allocator,
                                                             CodeGenerator* codegen,
                                                             const SsaLivenessAnalysis& analysis,
                                                             Strategy strategy) {
  switch (strategy) {
    case kRegisterAllocatorLinearScan:
      return std::unique_ptr<RegisterAllocator>(
          new (allocator) RegisterAllocatorLinearScan(allocator, codegen, analysis));
    case kRegisterAllocatorGraphColor:
      return std::unique_ptr<RegisterAllocator>(
          new (allocator) RegisterAllocatorGraphColor(allocator, codegen, analysis));
    default:
      LOG(FATAL) << "Invalid register allocation strategy: " << strategy;
      UNREACHABLE();
  }
}

RegisterAllocator::~RegisterAllocator() {
  if (kIsDebugBuild) {
    // Poison live interval pointers with "Error: BAD 71ve1nt3rval."
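    // This is meant to make any stale use of a live interval after register
    // allocation has finished fail loudly in debug builds.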
    LiveInterval* bad_live_interval = reinterpret_cast<LiveInterval*>(0xebad7113u);
    for (HBasicBlock* block : codegen_->GetGraph()->GetLinearOrder()) {
      for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
        it.Current()->SetLiveInterval(bad_live_interval);
      }
      for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
        it.Current()->SetLiveInterval(bad_live_interval);
      }
    }
  }
}

bool RegisterAllocator::CanAllocateRegistersFor(const HGraph& graph ATTRIBUTE_UNUSED,
                                                InstructionSet instruction_set) {
  return instruction_set == InstructionSet::kArm
      || instruction_set == InstructionSet::kArm64
      || instruction_set == InstructionSet::kMips
      || instruction_set == InstructionSet::kMips64
      || instruction_set == InstructionSet::kThumb2
      || instruction_set == InstructionSet::kX86
      || instruction_set == InstructionSet::kX86_64;
}

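// Helper iterator that visits every live range of an interval, following the
// chain of split siblings so ranges created by splitting are included as well.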
class AllRangesIterator : public ValueObject {
 public:
  explicit AllRangesIterator(LiveInterval* interval)
      : current_interval_(interval),
        current_range_(interval->GetFirstRange()) {}

  bool Done() const { return current_interval_ == nullptr; }
  LiveRange* CurrentRange() const { return current_range_; }
  LiveInterval* CurrentInterval() const { return current_interval_; }

  void Advance() {
    current_range_ = current_range_->GetNext();
    if (current_range_ == nullptr) {
      current_interval_ = current_interval_->GetNextSibling();
      if (current_interval_ != nullptr) {
        current_range_ = current_interval_->GetFirstRange();
      }
    }
  }

 private:
  LiveInterval* current_interval_;
  LiveRange* current_range_;

  DISALLOW_COPY_AND_ASSIGN(AllRangesIterator);
};

bool RegisterAllocator::ValidateIntervals(ArrayRef<LiveInterval* const> intervals,
                                          size_t number_of_spill_slots,
                                          size_t number_of_out_slots,
                                          const CodeGenerator& codegen,
                                          bool processing_core_registers,
                                          bool log_fatal_on_failure) {
  size_t number_of_registers = processing_core_registers
      ? codegen.GetNumberOfCoreRegisters()
      : codegen.GetNumberOfFloatingPointRegisters();
  ScopedArenaAllocator allocator(codegen.GetGraph()->GetArenaStack());
  ScopedArenaVector<ArenaBitVector*> liveness_of_values(
      allocator.Adapter(kArenaAllocRegisterAllocatorValidate));
  liveness_of_values.reserve(number_of_registers + number_of_spill_slots);

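  // Compute the furthest lifetime position covered by any range, so that each
  // bit vector allocated below spans all positions that may be marked live.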
  size_t max_end = 0u;
  for (LiveInterval* start_interval : intervals) {
    for (AllRangesIterator it(start_interval); !it.Done(); it.Advance()) {
      max_end = std::max(max_end, it.CurrentRange()->GetEnd());
    }
  }

  // Allocate a bit vector per register. A live interval that has a register
  // allocated will populate the associated bit vector based on its live ranges.
  for (size_t i = 0; i < number_of_registers + number_of_spill_slots; ++i) {
    liveness_of_values.push_back(
        ArenaBitVector::Create(&allocator, max_end, false, kArenaAllocRegisterAllocatorValidate));
    liveness_of_values.back()->ClearAllBits();
  }

  for (LiveInterval* start_interval : intervals) {
    for (AllRangesIterator it(start_interval); !it.Done(); it.Advance()) {
      LiveInterval* current = it.CurrentInterval();
      HInstruction* defined_by = current->GetParent()->GetDefinedBy();
      if (current->GetParent()->HasSpillSlot()
              // Parameters and current method have their own stack slot.
              && !(defined_by != nullptr && (defined_by->IsParameterValue()
                                             || defined_by->IsCurrentMethod()))) {
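        // Spill slots are tracked after the registers in `liveness_of_values`;
        // the index is the vreg-sized slot number of the parent interval, with
        // the outgoing argument slots excluded.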
        BitVector* liveness_of_spill_slot = liveness_of_values[number_of_registers
            + current->GetParent()->GetSpillSlot() / kVRegSize
            - number_of_out_slots];
        for (size_t j = it.CurrentRange()->GetStart(); j < it.CurrentRange()->GetEnd(); ++j) {
          if (liveness_of_spill_slot->IsBitSet(j)) {
            if (log_fatal_on_failure) {
              std::ostringstream message;
              message << "Spill slot conflict at " << j;
              LOG(FATAL) << message.str();
            } else {
              return false;
            }
          } else {
            liveness_of_spill_slot->SetBit(j);
          }
        }
      }

      if (current->HasRegister()) {
        if (kIsDebugBuild && log_fatal_on_failure && !current->IsFixed()) {
          // Only check when an error is fatal. Only test code asks for non-fatal
          // failures, and test code may not provide the code generator with all
          // the information this check needs.
          CHECK(codegen.HasAllocatedRegister(processing_core_registers, current->GetRegister()));
        }
        BitVector* liveness_of_register = liveness_of_values[current->GetRegister()];
        for (size_t j = it.CurrentRange()->GetStart(); j < it.CurrentRange()->GetEnd(); ++j) {
          if (liveness_of_register->IsBitSet(j)) {
            if (current->IsUsingInputRegister() && current->CanUseInputRegister()) {
              continue;
            }
            if (log_fatal_on_failure) {
              std::ostringstream message;
              message << "Register conflict at " << j << " ";
              if (defined_by != nullptr) {
                message << "(" << defined_by->DebugName() << ")";
              }
              message << "for ";
              if (processing_core_registers) {
                codegen.DumpCoreRegister(message, current->GetRegister());
              } else {
                codegen.DumpFloatingPointRegister(message, current->GetRegister());
              }
              for (LiveInterval* interval : intervals) {
                if (interval->HasRegister()
                    && interval->GetRegister() == current->GetRegister()
                    && interval->CoversSlow(j)) {
                  message << std::endl;
                  if (interval->GetDefinedBy() != nullptr) {
                    message << interval->GetDefinedBy()->GetKind() << " ";
                  } else {
                    message << "physical ";
                  }
                  interval->Dump(message);
                }
              }
              LOG(FATAL) << message.str();
            } else {
              return false;
            }
          } else {
            liveness_of_register->SetBit(j);
          }
        }
      }
    }
  }
  return true;
}

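// Splits `interval` at `position`. For values allocated to a register pair, the
// paired high/low sibling interval is split at the same position so both halves
// stay in sync.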
LiveInterval* RegisterAllocator::Split(LiveInterval* interval, size_t position) {
  DCHECK_GE(position, interval->GetStart());
  DCHECK(!interval->IsDeadAt(position));
  if (position == interval->GetStart()) {
    // Spill slot will be allocated when handling `interval` again.
    interval->ClearRegister();
    if (interval->HasHighInterval()) {
      interval->GetHighInterval()->ClearRegister();
    } else if (interval->HasLowInterval()) {
      interval->GetLowInterval()->ClearRegister();
    }
    return interval;
  } else {
    LiveInterval* new_interval = interval->SplitAt(position);
    if (interval->HasHighInterval()) {
      LiveInterval* high = interval->GetHighInterval()->SplitAt(position);
      new_interval->SetHighInterval(high);
      high->SetLowInterval(new_interval);
    } else if (interval->HasLowInterval()) {
      LiveInterval* low = interval->GetLowInterval()->SplitAt(position);
      new_interval->SetLowInterval(low);
      low->SetHighInterval(new_interval);
    }
    return new_interval;
  }
}

LiveInterval* RegisterAllocator::SplitBetween(LiveInterval* interval, size_t from, size_t to) {
  HBasicBlock* block_from = liveness_.GetBlockFromPosition(from / 2);
  HBasicBlock* block_to = liveness_.GetBlockFromPosition(to / 2);
  DCHECK(block_from != nullptr);
  DCHECK(block_to != nullptr);

  // Both locations are in the same block. We split at the given location.
  if (block_from == block_to) {
    return Split(interval, to);
  }

  /*
   * Non-linear control flow will force moves at every branch instruction to the new location.
   * To avoid having all branches doing the moves, we find the next non-linear position and
   * split the interval at this position. Take the following example (block number is the linear
   * order position):
   *
   *        B1
   *       /  \
   *      B2    B3
   *       \    /
   *         B4
   *
   * B2 needs to split an interval whose next use is in B4. If we were to split at the
   * beginning of B4, B3 would need to do a move between B3 and B4 to ensure the interval
   * is now in the correct location. It makes performance worse if the interval is spilled
   * and both B2 and B3 need to reload it before entering B4.
   *
   * By splitting at B3, we give the register allocator a chance to allocate the
   * interval to the same register as in B1, and therefore avoid doing any
   * moves in B3.
   */
  if (block_from->GetDominator() != nullptr) {
    for (HBasicBlock* dominated : block_from->GetDominator()->GetDominatedBlocks()) {
      size_t position = dominated->GetLifetimeStart();
      if ((position > from) && (block_to->GetLifetimeStart() > position)) {
        // Even if we found a better block, we continue iterating in case
        // a dominated block is closer.
        // Note that dominated blocks are not sorted in liveness order.
        block_to = dominated;
        DCHECK_NE(block_to, block_from);
      }
    }
  }

  // If `to` is in a loop, find the outermost loop header which does not contain `from`.
  for (HLoopInformationOutwardIterator it(*block_to); !it.Done(); it.Advance()) {
    HBasicBlock* header = it.Current()->GetHeader();
    if (block_from->GetLifetimeStart() >= header->GetLifetimeStart()) {
      break;
    }
    block_to = header;
  }

  // Split at the start of the found block, to piggyback on existing moves
  // due to resolution of non-linear control flow (see `ConnectSplitSiblings`).
  return Split(interval, block_to->GetLifetimeStart());
}

}  // namespace art