blob: d46b904c9ec8cba733c6ae8f60db29f45a03fb6a [file] [log] [blame]
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
18#define ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
19
20#include "escape.h"
21#include "nodes.h"
22#include "optimization.h"
23
24namespace art {
25
26// A ReferenceInfo contains additional info about a reference such as
27// whether it's a singleton, returned, etc.
28class ReferenceInfo : public ArenaObject<kArenaAllocMisc> {
29 public:
30 ReferenceInfo(HInstruction* reference, size_t pos)
31 : reference_(reference),
32 position_(pos),
33 is_singleton_(true),
34 is_singleton_and_not_returned_(true),
35 is_singleton_and_not_deopt_visible_(true),
36 has_index_aliasing_(false) {
37 CalculateEscape(reference_,
38 nullptr,
39 &is_singleton_,
40 &is_singleton_and_not_returned_,
41 &is_singleton_and_not_deopt_visible_);
42 }
43
44 HInstruction* GetReference() const {
45 return reference_;
46 }
47
48 size_t GetPosition() const {
49 return position_;
50 }
51
52 // Returns true if reference_ is the only name that can refer to its value during
53 // the lifetime of the method. So it's guaranteed to not have any alias in
54 // the method (including its callees).
55 bool IsSingleton() const {
56 return is_singleton_;
57 }
58
59 // Returns true if reference_ is a singleton and not returned to the caller or
60 // used as an environment local of an HDeoptimize instruction.
61 // The allocation and stores into reference_ may be eliminated for such cases.
62 bool IsSingletonAndRemovable() const {
63 return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
64 }
65
66 // Returns true if reference_ is a singleton and returned to the caller or
67 // used as an environment local of an HDeoptimize instruction.
68 bool IsSingletonAndNonRemovable() const {
69 return is_singleton_ &&
70 (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
71 }
72
73 bool HasIndexAliasing() {
74 return has_index_aliasing_;
75 }
76
77 void SetHasIndexAliasing(bool has_index_aliasing) {
78 // Only allow setting to true.
79 DCHECK(has_index_aliasing);
80 has_index_aliasing_ = has_index_aliasing;
81 }
82
83 private:
84 HInstruction* const reference_;
85 const size_t position_; // position in HeapLocationCollector's ref_info_array_.
86
87 // Can only be referred to by a single name in the method.
88 bool is_singleton_;
89 // Is singleton and not returned to caller.
90 bool is_singleton_and_not_returned_;
91 // Is singleton and not used as an environment local of HDeoptimize.
92 bool is_singleton_and_not_deopt_visible_;
93 // Some heap locations with reference_ have array index aliasing,
94 // e.g. arr[i] and arr[j] may be the same location.
95 bool has_index_aliasing_;
96
97 DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
98};
99
100// A heap location is a reference-offset/index pair that a value can be loaded from
101// or stored to.
102class HeapLocation : public ArenaObject<kArenaAllocMisc> {
103 public:
104 static constexpr size_t kInvalidFieldOffset = -1;
105
106 // TODO: more fine-grained array types.
107 static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
108
109 HeapLocation(ReferenceInfo* ref_info,
110 size_t offset,
111 HInstruction* index,
112 int16_t declaring_class_def_index)
113 : ref_info_(ref_info),
114 offset_(offset),
115 index_(index),
116 declaring_class_def_index_(declaring_class_def_index),
117 value_killed_by_loop_side_effects_(true) {
118 DCHECK(ref_info != nullptr);
119 DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
120 (offset != kInvalidFieldOffset && index == nullptr));
121 if (ref_info->IsSingleton() && !IsArrayElement()) {
122 // Assume this location's value cannot be killed by loop side effects
123 // until proven otherwise.
124 value_killed_by_loop_side_effects_ = false;
125 }
126 }
127
128 ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
129 size_t GetOffset() const { return offset_; }
130 HInstruction* GetIndex() const { return index_; }
131
132 // Returns the definition of declaring class' dex index.
133 // It's kDeclaringClassDefIndexForArrays for an array element.
134 int16_t GetDeclaringClassDefIndex() const {
135 return declaring_class_def_index_;
136 }
137
138 bool IsArrayElement() const {
139 return index_ != nullptr;
140 }
141
142 bool IsValueKilledByLoopSideEffects() const {
143 return value_killed_by_loop_side_effects_;
144 }
145
146 void SetValueKilledByLoopSideEffects(bool val) {
147 value_killed_by_loop_side_effects_ = val;
148 }
149
150 private:
151 ReferenceInfo* const ref_info_; // reference for instance/static field or array access.
152 const size_t offset_; // offset of static/instance field.
153 HInstruction* const index_; // index of an array element.
154 const int16_t declaring_class_def_index_; // declaring class's def's dex index.
155 bool value_killed_by_loop_side_effects_; // value of this location may be killed by loop
156 // side effects because this location is stored
157 // into inside a loop. This gives
158 // better info on whether a singleton's location
159 // value may be killed by loop side effects.
160
161 DISALLOW_COPY_AND_ASSIGN(HeapLocation);
162};
163
// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations.
// Usage: visit the graph (in reverse post order), then call
// BuildAliasingMatrix(); afterwards MayAlias() answers pair-wise queries.
class HeapLocationCollector : public HGraphVisitor {
 public:
  // Returned by the Find* methods when no matching location exists
  // (size_t(-1), i.e. the maximum index).
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for pair-wise
  // aliasing matrix of 8 heap locations.
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  explicit HeapLocationCollector(HGraph* graph)
      : HGraphVisitor(graph),
        ref_info_array_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        heap_locations_(graph->GetArena()->Adapter(kArenaAllocLSE)),
        aliasing_matrix_(graph->GetArena(),
                         kInitialAliasingMatrixBitVectorSize,
                         true,
                         kArenaAllocLSE),
        has_heap_stores_(false),
        has_volatile_(false),
        has_monitor_operations_(false) {}

  // Drops all collected locations and reference infos (arena-allocated,
  // so no memory is freed here). Note: the aliasing matrix bits and the
  // has_* flags are not reset.
  void CleanUp() {
    heap_locations_.clear();
    ref_info_array_.clear();
  }

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  // Strips HNullCheck/HBoundType wrappers to get at the underlying
  // reference, so that different uses of the same object map to the
  // same ReferenceInfo.
  HInstruction* HuntForOriginalReference(HInstruction* ref) const {
    DCHECK(ref != nullptr);
    while (ref->IsNullCheck() || ref->IsBoundType()) {
      ref = ref->InputAt(0);
    }
    return ref;
  }

  // Linear scan over the collected references; returns nullptr when `ref`
  // has not been seen. Each info's position must equal its array index.
  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

  // Returns the index of the heap location for `array[index]`, or
  // kHeapLocationNotFound if it was never collected (including the case
  // where `array` itself is unknown, since a null ReferenceInfo matches
  // no location in FindHeapLocationIndex).
  size_t GetArrayAccessHeapLocation(HInstruction* array, HInstruction* index) const {
    DCHECK(array != nullptr);
    DCHECK(index != nullptr);
    HInstruction* original_ref = HuntForOriginalReference(array);
    ReferenceInfo* ref_info = FindReferenceInfoOf(original_ref);
    return FindHeapLocationIndex(ref_info,
                                 HeapLocation::kInvalidFieldOffset,
                                 index,
                                 HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  // If there are no heap stores, LSE degenerates to GVN-like behavior
  // (see the comment on has_heap_stores_ below).
  bool HasHeapStores() const {
    return has_heap_stores_;
  }

  bool HasVolatile() const {
    return has_volatile_;
  }

  bool HasMonitorOps() const {
    return has_monitor_operations_;
  }

  // Find and return the heap location index in heap_locations_.
  // All four components must match exactly; returns kHeapLocationNotFound
  // otherwise.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               size_t offset,
                               HInstruction* index,
                               int16_t declaring_class_def_index) const {
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  // The matrix only stores the upper triangle, so the smaller index always
  // comes first. Asking about the same location twice is a caller bug
  // (debug-checked); conservatively answers true in release builds.
  bool MayAlias(size_t index1, size_t index2) const {
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

  // Must be called after visiting the graph, once all heap locations are
  // known. O(n^2) in the number of locations.
  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }

  // Conservative pair-wise aliasing for base references: distinct
  // singletons never alias; otherwise alias unless one side is an
  // allocation that post-dates the other.
  bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) const {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
               !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }

  // Defined in the .cc file; decides whether two array-index instructions
  // may evaluate to the same value.
  bool CanArrayIndicesAlias(const HInstruction* i1, const HInstruction* i2) const;

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }

  // Compute if two locations may alias to each other.
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    // Note: the offset check also separates field accesses from array
    // elements, since array locations always use kInvalidFieldOffset.
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is an array element.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different types.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArrayElement() && loc2->IsArrayElement()) {
      HInstruction* array_index1 = loc1->GetIndex();
      HInstruction* array_index2 = loc2->GetIndex();
      if (!CanArrayIndicesAlias(array_index1, array_index2)) {
        return false;
      }
      // Record on the shared reference that its array locations alias
      // through their indices.
      ReferenceInfo* ref_info = loc1->GetReferenceInfo();
      ref_info->SetHasIndexAliasing(true);
    }
    return true;
  }

  // Returns the existing info for `instruction` or creates one; the new
  // info's position records creation order (used for preexistence checks).
  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (GetGraph()->GetArena()) ReferenceInfo(instruction, pos);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  // Non-reference-typed instructions are ignored; each reference-typed
  // instruction must be registered at most once.
  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != DataType::Type::kReference) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  // Deduplicates locations: equal (ref, offset, index, class) tuples share
  // one HeapLocation.
  HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
                                        size_t offset,
                                        HInstruction* index,
                                        int16_t declaring_class_def_index) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, offset, index, declaring_class_def_index);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (GetGraph()->GetArena())
          HeapLocation(ref_info, offset, index, declaring_class_def_index);
      heap_locations_.push_back(heap_loc);
      return heap_loc;
    }
    return heap_locations_[heap_location_idx];
  }

  // Shared by the four field-access visitors; also records volatility.
  HeapLocation* VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    if (field_info.IsVolatile()) {
      has_volatile_ = true;
    }
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    return GetOrCreateHeapLocation(ref, offset, nullptr, declaring_class_def_index);
  }

  void VisitArrayAccess(HInstruction* array, HInstruction* index) {
    GetOrCreateHeapLocation(array, HeapLocation::kInvalidFieldOffset,
        index, HeapLocation::kDeclaringClassDefIndexForArrays);
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
    HeapLocation* location = VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
    if (location->GetReferenceInfo()->IsSingleton()) {
      // A singleton's location value may be killed by loop side effects if it's
      // defined before that loop, and it's stored into inside that loop.
      HLoopInformation* loop_info = instruction->GetBlock()->GetLoopInformation();
      if (loop_info != nullptr) {
        HInstruction* ref = location->GetReferenceInfo()->GetReference();
        // NOTE(review): assumes a singleton receiver of an instance-field
        // store is always an HNewInstance — presumably guaranteed by
        // CalculateEscape; verify there.
        DCHECK(ref->IsNewInstance());
        if (loop_info->IsDefinedOutOfTheLoop(ref)) {
          // ref's location value may be killed by this loop's side effects.
          location->SetValueKilledByLoopSideEffects(true);
        } else {
          // ref is defined inside this loop so this loop's side effects cannot
          // kill its location value at the loop header since ref/its location doesn't
          // exist yet at the loop header.
        }
      }
    } else {
      // For non-singletons, value_killed_by_loop_side_effects_ is inited to
      // true.
      DCHECK_EQ(location->IsValueKilledByLoopSideEffects(), true);
    }
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) OVERRIDE {
    VisitArrayAccess(instruction->InputAt(0), instruction->InputAt(1));
    has_heap_stores_ = true;
  }

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    // Any new-instance or new-array cannot alias with references that
    // pre-exist the new-instance/new-array. We append entries into
    // ref_info_array_ which keeps track of the order of creation
    // of reference values since we visit the blocks in reverse post order.
    //
    // By default, VisitXXX() (including VisitPhi()) calls VisitInstruction(),
    // unless VisitXXX() is overridden. VisitInstanceFieldGet() etc. above
    // also call CreateReferenceInfoForReferenceType() explicitly.
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitMonitorOperation(HMonitorOperation* monitor ATTRIBUTE_UNUSED) OVERRIDE {
    has_monitor_operations_ = true;
  }

  ArenaVector<ReferenceInfo*> ref_info_array_;   // All references used for heap accesses.
  ArenaVector<HeapLocation*> heap_locations_;    // All heap locations.
  ArenaBitVector aliasing_matrix_;    // aliasing info between each pair of locations.
  bool has_heap_stores_;    // If there is no heap stores, LSE acts as GVN with better
                            // alias analysis and won't be as effective.
  bool has_volatile_;       // If there are volatile field accesses.
  bool has_monitor_operations_;    // If there are monitor operations.

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};
490
// Load/store analysis pass. Wraps a HeapLocationCollector as an
// HOptimization so it can be scheduled in the pass pipeline; the actual
// graph traversal happens in Run() (defined in the .cc file).
class LoadStoreAnalysis : public HOptimization {
 public:
  explicit LoadStoreAnalysis(HGraph* graph)
      : HOptimization(graph, kLoadStoreAnalysisPassName),
        heap_location_collector_(graph) {}

  // Accessor for the analysis results; presumably only meaningful after
  // Run() has populated the collector — confirm against Run() in the .cc.
  const HeapLocationCollector& GetHeapLocationCollector() const {
    return heap_location_collector_;
  }

  void Run() OVERRIDE;

  static constexpr const char* kLoadStoreAnalysisPassName = "load_store_analysis";

 private:
  HeapLocationCollector heap_location_collector_;

  DISALLOW_COPY_AND_ASSIGN(LoadStoreAnalysis);
};
510
511} // namespace art
512
513#endif // ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_