blob: a277da883179f7dbd34bffad4e4c2be60c783d70 [file] [log] [blame]
jkummerow@chromium.orgc1184022013-05-28 16:58:15 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "hydrogen.h"
29#include "hydrogen-gvn.h"
30#include "v8.h"
31
32namespace v8 {
33namespace internal {
34
// A hash map from HValue hash codes to HValue* instances, used by GVN to
// find previously-seen equivalent values.  Bucket heads live directly in
// array_; collisions are chained through index-linked nodes in the shared
// lists_ pool.  Zone-allocated, so no destructor ever runs.
class HValueMap: public ZoneObject {
 public:
  explicit HValueMap(Zone* zone)
      : array_size_(0),
        lists_size_(0),
        count_(0),
        present_flags_(0),
        array_(NULL),
        lists_(NULL),
        free_list_head_(kNil) {
    ResizeLists(kInitialSize, zone);
    Resize(kInitialSize, zone);
  }

  // Removes every stored value whose "depends on" flags intersect the
  // given "changes" flags (i.e. values the changes may invalidate).
  void Kill(GVNFlagSet flags);

  // Adds a value.  present_flags_ accumulates the union of the GVN flags
  // of everything stored, letting Kill() bail out early when possible.
  void Add(HValue* value, Zone* zone) {
    present_flags_.Add(value->gvn_flags());
    Insert(value, zone);
  }

  // Returns a stored value with the same hash code that Equals() the
  // argument, or NULL if no such value is present.
  HValue* Lookup(HValue* value) const;

  HValueMap* Copy(Zone* zone) const {
    return new(zone) HValueMap(zone, this);
  }

  bool IsEmpty() const { return count_ == 0; }

 private:
  // A linked list of HValue* values. Stored in arrays.
  struct HValueMapListElement {
    HValue* value;
    int next;  // Index in the array of the next list element.
  };
  static const int kNil = -1;  // The end of a linked list

  // Must be a power of 2.
  static const int kInitialSize = 16;

  HValueMap(Zone* zone, const HValueMap* other);

  void Resize(int new_size, Zone* zone);
  void ResizeLists(int new_size, Zone* zone);
  void Insert(HValue* value, Zone* zone);
  // Maps a hash code to a bucket index; relies on array_size_ being a
  // power of two so the mask is valid.
  uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }

  int array_size_;
  int lists_size_;
  int count_;  // The number of values stored in the HValueMap.
  GVNFlagSet present_flags_;  // All flags that are in any value in the
                              // HValueMap.
  HValueMapListElement* array_;  // Primary store - contains the first value
                                 // with a given hash.  Colliding elements
                                 // are stored in linked lists.
  HValueMapListElement* lists_;  // The linked lists containing hash collisions.
  int free_list_head_;  // Unused elements in lists_ are on the free list.
};
92
93
// Tracks, for each tracked side-effect kind, the most recently seen
// dominating instruction producing that side effect.  count_ caches the
// number of non-NULL entries so IsEmpty() is O(1); Kill() and Store()
// keep it in sync with data_.
class HSideEffectMap BASE_EMBEDDED {
 public:
  HSideEffectMap();
  explicit HSideEffectMap(HSideEffectMap* other);
  HSideEffectMap& operator= (const HSideEffectMap& other);

  // Clears the entry for every "changes" flag contained in flags.
  void Kill(GVNFlagSet flags);

  // Records instr as the dominating instruction for every "changes" flag
  // contained in flags.
  void Store(GVNFlagSet flags, HInstruction* instr);

  bool IsEmpty() const { return count_ == 0; }

  inline HInstruction* operator[](int i) const {
    ASSERT(0 <= i);
    ASSERT(i < kNumberOfTrackedSideEffects);
    return data_[i];
  }
  inline HInstruction* at(int i) const { return operator[](i); }

 private:
  int count_;  // Number of non-NULL entries in data_.
  HInstruction* data_[kNumberOfTrackedSideEffects];
};
117
118
// printf-style tracing helper for --trace-gvn output.  Always prints when
// called; callers go through the TRACE_GVN_* macros below so that neither
// this call nor its arguments are evaluated when tracing is disabled.
void TraceGVN(const char* msg, ...) {
  va_list arguments;
  va_start(arguments, msg);
  OS::VPrint(msg, arguments);
  va_end(arguments);
}
125
// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments
// when --trace-gvn is off.
// NOTE(review): these expand to a bare if-statement rather than the usual
// do { ... } while (false) wrapper, so they are subject to the dangling-else
// problem inside unbraced if/else statements.  At least one call site
// (GvnBasicBlockState::pop) invokes a macro with no trailing semicolon and
// relies on this expansion; do not change the wrapper without updating all
// call sites.
#define TRACE_GVN_1(msg, a1) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1); \
  }

#define TRACE_GVN_2(msg, a1, a2) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2); \
  }

#define TRACE_GVN_3(msg, a1, a2, a3) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3); \
  }

#define TRACE_GVN_4(msg, a1, a2, a3, a4) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3, a4); \
  }

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3, a4, a5); \
  }
152
153
// Deep-copies another map into freshly Zone-allocated arrays.  Elements
// hold only raw pointers and list indices, so a flat memcpy of both arrays
// reproduces the entire structure, including the free list threading.
HValueMap::HValueMap(Zone* zone, const HValueMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_flags_(other->present_flags_),
      array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_) {
  OS::MemCopy(
      array_, other->array_, array_size_ * sizeof(HValueMapListElement));
  OS::MemCopy(
      lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
}
167
168
// Removes every value whose "depends on" flags intersect the "changes"
// flags being killed, and rebuilds present_flags_ from the survivors.
// Each bucket's collision chain is filtered first; then the directly
// indexed head element is dropped (promoting the first surviving chain
// node into the bucket slot) if it too is affected.
void HValueMap::Kill(GVNFlagSet flags) {
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
  // Fast path: nothing currently stored depends on any of these changes.
  if (!present_flags_.ContainsAnyOf(depends_flags)) return;
  present_flags_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HValue* value = array_[i].value;
    if (value != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        HValue* value = lists_[current].value;  // Shadows the bucket head.
        if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
          // Drop it: return the node to the free list.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it: push onto the rebuilt (reversed) chain.
          lists_[current].next = kept;
          kept = current;
          present_flags_.Add(value->gvn_flags());
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      value = array_[i].value;
      if (value->gvn_flags().ContainsAnyOf(depends_flags)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          array_[i].value = NULL;
        } else {
          // Promote the first chain node to bucket head and recycle its
          // lists_ slot.
          array_[i].value = lists_[head].value;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_flags_.Add(value->gvn_flags());  // Keep it.
      }
    }
  }
}
215
216
217HValue* HValueMap::Lookup(HValue* value) const {
218 uint32_t hash = static_cast<uint32_t>(value->Hashcode());
219 uint32_t pos = Bound(hash);
220 if (array_[pos].value != NULL) {
221 if (array_[pos].value->Equals(value)) return array_[pos].value;
222 int next = array_[pos].next;
223 while (next != kNil) {
224 if (lists_[next].value->Equals(value)) return lists_[next].value;
225 next = lists_[next].next;
226 }
227 }
228 return NULL;
229}
230
231
// Grows the bucket array to new_size (a power of two) and rehashes every
// stored value via Insert().  Collision nodes are recycled through the
// free list as their values are re-inserted, so the lists_ pool only
// needs one guaranteed-free node up front for Insert()'s worst case.
void HValueMap::Resize(int new_size, Zone* zone) {
  ASSERT(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }

  HValueMapListElement* new_array =
      zone->NewArray<HValueMapListElement>(new_size);
  memset(new_array, 0, sizeof(HValueMapListElement) * new_size);

  HValueMapListElement* old_array = array_;
  int old_size = array_size_;

  int old_count = count_;
  count_ = 0;  // Insert() below re-counts every element.
  // Do not modify present_flags_.  It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].value != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          // Re-insert the chained value, then recycle its node.  The next
          // index must be read before the node is pushed on the free list.
          Insert(lists_[current].value, zone);
          int next = lists_[current].next;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored value.
        Insert(old_array[i].value, zone);
      }
    }
  }
  USE(old_count);
  ASSERT(count_ == old_count);  // Rehashing must not gain or lose elements.
}
275
276
// Grows the collision-node pool to new_size, copying the existing nodes
// verbatim (indices stay valid) and threading every newly added slot onto
// the front of the free list.
void HValueMap::ResizeLists(int new_size, Zone* zone) {
  ASSERT(new_size > lists_size_);

  HValueMapListElement* new_lists =
      zone->NewArray<HValueMapListElement>(new_size);
  memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);

  HValueMapListElement* old_lists = lists_;
  int old_size = lists_size_;

  lists_size_ = new_size;
  lists_ = new_lists;

  if (old_lists != NULL) {
    OS::MemCopy(lists_, old_lists, old_size * sizeof(HValueMapListElement));
  }
  // Chain the fresh slots [old_size, new_size) onto the free list.
  for (int i = old_size; i < lists_size_; ++i) {
    lists_[i].next = free_list_head_;
    free_list_head_ = i;
  }
}
298
299
// Inserts value into its hash bucket, growing the bucket array when the
// map becomes half full.  The first value for a bucket is stored directly
// in array_; further values are pushed onto the front of the bucket's
// chain using nodes taken from the free list (growing lists_ if empty).
void HValueMap::Insert(HValue* value, Zone* zone) {
  ASSERT(value != NULL);
  // Resizing when half of the hashtable is filled up.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  ASSERT(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
  if (array_[pos].value == NULL) {
    array_[pos].value = value;
    array_[pos].next = kNil;
  } else {
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1, zone);
    }
    // Pop a node from the free list and link it in as the new chain head.
    int new_element_pos = free_list_head_;
    ASSERT(new_element_pos != kNil);
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].value = value;
    lists_[new_element_pos].next = array_[pos].next;
    ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
    array_[pos].next = new_element_pos;
  }
}
323
324
325HSideEffectMap::HSideEffectMap() : count_(0) {
326 memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
327}
328
329
// Copy constructor (takes a non-const pointer to match existing callers).
// count_ is copied explicitly in the initializer list because operator=
// as written only copies the data_ array.
HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  *this = *other;  // Calls operator=.
}
333
334
335HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
336 if (this != &other) {
337 OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
338 }
339 return *this;
340}
341
342void HSideEffectMap::Kill(GVNFlagSet flags) {
343 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
344 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
345 if (flags.Contains(changes_flag)) {
346 if (data_[i] != NULL) count_--;
347 data_[i] = NULL;
348 }
349 }
350}
351
352
353void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) {
354 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
355 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
356 if (flags.Contains(changes_flag)) {
357 if (data_[i] == NULL) count_++;
358 data_[i] = instr;
359 }
360 }
361}
362
363
// Sets up per-block and per-loop side-effect tables sized to the graph's
// block count and pre-filled with empty flag sets, plus the bitvector used
// to avoid revisiting blocks during path side-effect collection.  Handle
// allocation must already be disallowed when GVN runs.
HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
    : HPhase("H_Global value numbering", graph),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), zone()),
      loop_side_effects_(graph->blocks()->length(), zone()),
      visited_on_paths_(zone(), graph->blocks()->length()) {
  ASSERT(!AllowHandleAllocation::IsAllowed());
  block_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(),
                               zone());
  loop_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(),
                              zone());
}
376
// Runs one GVN pass: recomputes per-block/per-loop side effects, optionally
// performs loop-invariant code motion, then eliminates redundant values
// over the dominator tree.  May be invoked multiple times on the same
// graph (removed_side_effects_ is reset each time).
void HGlobalValueNumberingPhase::Analyze() {
  removed_side_effects_ = false;
  ComputeBlockSideEffects();
  if (FLAG_loop_invariant_code_motion) {
    LoopInvariantCodeMotion();
  }
  AnalyzeGraph();
}
385
386
// Computes, for every basic block, the union of the ChangesFlags of its
// instructions (stored in block_side_effects_), and aggregates the same
// per loop (loop_side_effects_), propagating each block's effects up to
// its enclosing loop headers.
void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
  // The Analyze phase of GVN can be called multiple times. Clear loop side
  // effects before computing them to erase the contents from previous Analyze
  // passes.
  for (int i = 0; i < loop_side_effects_.length(); ++i) {
    loop_side_effects_[i].RemoveAll();
  }
  // Iterate blocks in reverse id order so inner blocks are processed before
  // the loop headers their effects propagate into.
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph()->blocks()->at(i);
    HInstruction* instr = block->first();
    int id = block->block_id();
    GVNFlagSet side_effects;
    while (instr != NULL) {
      side_effects.Add(instr->ChangesFlags());
      if (instr->IsSoftDeoptimize()) {
        // A soft deoptimize ends optimized execution of the block, so the
        // block's accumulated effects are discarded.
        block_side_effects_[id].RemoveAll();
        side_effects.RemoveAll();
        break;
      }
      instr = instr->next();
    }
    block_side_effects_[id].Add(side_effects);

    // Loop headers are part of their loop.
    if (block->IsLoopHeader()) {
      loop_side_effects_[id].Add(side_effects);
    }

    // Propagate loop side effects upwards.
    if (block->HasParentLoopHeader()) {
      int header_id = block->parent_loop_header()->block_id();
      // A nested loop header contributes its whole loop's effects (already
      // complete thanks to the reverse iteration order).
      loop_side_effects_[header_id].Add(block->IsLoopHeader()
                                        ? loop_side_effects_[id]
                                        : side_effects);
    }
  }
}
425
426
427SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) {
428 char underlying_buffer[kLastFlag * 128];
429 Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer));
430#if DEBUG
431 int offset = 0;
432 const char* separator = "";
433 const char* comma = ", ";
434 buffer[0] = 0;
435 uint32_t set_depends_on = 0;
436 uint32_t set_changes = 0;
437 for (int bit = 0; bit < kLastFlag; ++bit) {
438 if ((flags.ToIntegral() & (1 << bit)) != 0) {
439 if (bit % 2 == 0) {
440 set_changes++;
441 } else {
442 set_depends_on++;
443 }
444 }
445 }
446 bool positive_changes = set_changes < (kLastFlag / 2);
447 bool positive_depends_on = set_depends_on < (kLastFlag / 2);
448 if (set_changes > 0) {
449 if (positive_changes) {
450 offset += OS::SNPrintF(buffer + offset, "changes [");
451 } else {
452 offset += OS::SNPrintF(buffer + offset, "changes all except [");
453 }
454 for (int bit = 0; bit < kLastFlag; ++bit) {
455 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) {
456 switch (static_cast<GVNFlag>(bit)) {
457#define DECLARE_FLAG(type) \
458 case kChanges##type: \
459 offset += OS::SNPrintF(buffer + offset, separator); \
460 offset += OS::SNPrintF(buffer + offset, #type); \
461 separator = comma; \
462 break;
463GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
464GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
465#undef DECLARE_FLAG
466 default:
467 break;
468 }
469 }
470 }
471 offset += OS::SNPrintF(buffer + offset, "]");
472 }
473 if (set_depends_on > 0) {
474 separator = "";
475 if (set_changes > 0) {
476 offset += OS::SNPrintF(buffer + offset, ", ");
477 }
478 if (positive_depends_on) {
479 offset += OS::SNPrintF(buffer + offset, "depends on [");
480 } else {
481 offset += OS::SNPrintF(buffer + offset, "depends on all except [");
482 }
483 for (int bit = 0; bit < kLastFlag; ++bit) {
484 if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) {
485 switch (static_cast<GVNFlag>(bit)) {
486#define DECLARE_FLAG(type) \
487 case kDependsOn##type: \
488 offset += OS::SNPrintF(buffer + offset, separator); \
489 offset += OS::SNPrintF(buffer + offset, #type); \
490 separator = comma; \
491 break;
492GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
493GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
494#undef DECLARE_FLAG
495 default:
496 break;
497 }
498 }
499 }
500 offset += OS::SNPrintF(buffer + offset, "]");
501 }
502#else
503 OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral());
504#endif
505 size_t string_len = strlen(underlying_buffer) + 1;
506 ASSERT(string_len <= sizeof(underlying_buffer));
507 char* result = new char[strlen(underlying_buffer) + 1];
508 OS::MemCopy(result, underlying_buffer, string_len);
509 return SmartArrayPointer<char>(result);
510}
511
512
// For each loop (headers found by scanning blocks in reverse id order),
// attempts to hoist invariant instructions from every block in the loop's
// body range [header, last back edge] into the pre-header via
// ProcessLoopBlock.  The accumulated "first time" flag sets are shared
// across all blocks of one loop.
void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph()->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    if (block->IsLoopHeader()) {
      GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
      TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
                  block->block_id(),
                  *GetGVNFlagsString(side_effects));

      GVNFlagSet accumulated_first_time_depends;
      GVNFlagSet accumulated_first_time_changes;
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      // Loop body blocks have contiguous ids from the header to the last
      // back edge.
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects,
                         &accumulated_first_time_depends,
                         &accumulated_first_time_changes);
      }
    }
  }
}
535
536
// Attempts to hoist GVN-able instructions of one loop-body block into the
// loop's pre-header.  An instruction is hoistable when it does not depend
// on anything the loop changes (loop_kills), all of its operands are
// defined no later than the pre-header, and ShouldMove() agrees.  The
// first_time_depends/first_time_changes sets accumulate the flags of
// instructions that were NOT hoisted (per the original comment, to account
// for their side effects when hoisting later HTransitionElementsKind
// instructions — note the accumulated sets are only traced here, not
// consulted; presumably callers/later passes rely on them).
void HGlobalValueNumberingPhase::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    GVNFlagSet loop_kills,
    GVNFlagSet* first_time_depends,
    GVNFlagSet* first_time_changes) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
  TRACE_GVN_2("Loop invariant motion for B%d %s\n",
              block->block_id(),
              *GetGVNFlagsString(depends_flags));
  HInstruction* instr = block->first();
  while (instr != NULL) {
    // Capture the successor first: hoisting unlinks instr from this block.
    HInstruction* next = instr->next();
    bool hoisted = false;
    if (instr->CheckFlag(HValue::kUseGVN)) {
      TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
                  instr->id(),
                  instr->Mnemonic(),
                  *GetGVNFlagsString(instr->gvn_flags()),
                  *GetGVNFlagsString(loop_kills));
      bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
      // Without optimistic LICM, only hoist from blocks that dominate all
      // loop exits.
      if (can_hoist && !graph()->use_optimistic_licm()) {
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        // All operands must be available before the loop is entered.
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          hoisted = true;
        }
      }
    }
    if (!hoisted) {
      // If an instruction is not hoisted, we have to account for its side
      // effects when hoisting later HTransitionElementsKind instructions.
      GVNFlagSet previous_depends = *first_time_depends;
      GVNFlagSet previous_changes = *first_time_changes;
      first_time_depends->Add(instr->DependsOnFlags());
      first_time_changes->Add(instr->ChangesFlags());
      if (!(previous_depends == *first_time_depends)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_depends));
      }
      if (!(previous_changes == *first_time_changes)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_changes));
      }
    }
    instr = next;
  }
}
600
601
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000602bool HGlobalValueNumberingPhase::AllowCodeMotion() {
jkummerow@chromium.orgc1184022013-05-28 16:58:15 +0000603 return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count;
604}
605
606
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000607bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
608 HBasicBlock* loop_header) {
jkummerow@chromium.orgc1184022013-05-28 16:58:15 +0000609 // If we've disabled code motion or we're in a block that unconditionally
610 // deoptimizes, don't move any instructions.
611 return AllowCodeMotion() && !instr->block()->IsDeoptimizing();
612}
613
614
// Collects the union of side effects of every block lying strictly between
// dominator and dominated (by block id) on some path into dominated,
// walking predecessor chains recursively.  visited_on_paths_ must be
// cleared by the caller before the outermost call; it prevents revisiting
// blocks across the recursion.
GVNFlagSet
HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  GVNFlagSet side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    // Only consider blocks strictly inside the (dominator, dominated) id
    // range that have not been visited yet; Add() returns whether the bit
    // was newly set.
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        visited_on_paths_.Add(block->block_id())) {
      side_effects.Add(block_side_effects_[block->block_id()]);
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}
634
635
// Each instance of this class is like a "stack frame" for the recursive
// traversal of the dominator tree done during GVN (the stack is handled
// as a double linked list).
// We reuse frames when possible so the list length is limited by the depth
// of the dominator tree but this forces us to initialize each frame calling
// an explicit "Initialize" method instead of a using constructor.
class GvnBasicBlockState: public ZoneObject {
 public:
  // Creates the root frame for the traversal starting at entry_block.
  static GvnBasicBlockState* CreateEntry(Zone* zone,
                                         HBasicBlock* entry_block,
                                         HValueMap* entry_map) {
    return new(zone)
        GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
  }

  HBasicBlock* block() { return block_; }
  HValueMap* map() { return map_; }
  HSideEffectMap* dominators() { return &dominators_; }

  // Advances the traversal: returns the next frame to process (a dominated
  // block of this frame, or of an ancestor after backtracking), and sets
  // *dominator to the block that dominates it.  Returns NULL when the
  // whole tree has been visited.
  GvnBasicBlockState* next_in_dominator_tree_traversal(
      Zone* zone,
      HBasicBlock** dominator) {
    // This assignment needs to happen before calling next_dominated() because
    // that call can reuse "this" if we are at the last dominated block.
    *dominator = block();
    GvnBasicBlockState* result = next_dominated(zone);
    if (result == NULL) {
      GvnBasicBlockState* dominator_state = pop();
      if (dominator_state != NULL) {
        // This branch is guaranteed not to return NULL because pop() never
        // returns a state where "is_done() == true".
        *dominator = dominator_state->block();
        result = dominator_state->next_dominated(zone);
      } else {
        // Unnecessary (we are returning NULL) but done for cleanness.
        *dominator = NULL;
      }
    }
    return result;
  }

 private:
  // (Re)initializes this frame for the given block.  copy_map controls
  // whether the value map is cloned (needed when the map will also be used
  // by sibling subtrees) or adopted as-is.
  void Initialize(HBasicBlock* block,
                  HValueMap* map,
                  HSideEffectMap* dominators,
                  bool copy_map,
                  Zone* zone) {
    block_ = block;
    map_ = copy_map ? map->Copy(zone) : map;
    dominated_index_ = -1;
    length_ = block->dominated_blocks()->length();
    if (dominators != NULL) {
      dominators_ = *dominators;
    }
  }
  // True when every dominated block of this frame has been handed out.
  bool is_done() { return dominated_index_ >= length_; }

  GvnBasicBlockState(GvnBasicBlockState* previous,
                     HBasicBlock* block,
                     HValueMap* map,
                     HSideEffectMap* dominators,
                     Zone* zone)
      : previous_(previous), next_(NULL) {
    Initialize(block, map, dominators, true, zone);
  }

  // Returns a frame for the next dominated block, reusing "this" for the
  // last child (so its map can be adopted without copying), or NULL when
  // all children are exhausted.
  GvnBasicBlockState* next_dominated(Zone* zone) {
    dominated_index_++;
    if (dominated_index_ == length_ - 1) {
      // No need to copy the map for the last child in the dominator tree.
      Initialize(block_->dominated_blocks()->at(dominated_index_),
                 map(),
                 dominators(),
                 false,
                 zone);
      return this;
    } else if (dominated_index_ < length_) {
      return push(zone,
                  block_->dominated_blocks()->at(dominated_index_),
                  dominators());
    } else {
      return NULL;
    }
  }

  // Appends (or reuses) the frame after this one in the linked list.
  GvnBasicBlockState* push(Zone* zone,
                           HBasicBlock* block,
                           HSideEffectMap* dominators) {
    if (next_ == NULL) {
      next_ =
          new(zone) GvnBasicBlockState(this, block, map(), dominators, zone);
    } else {
      next_->Initialize(block, map(), dominators, true, zone);
    }
    return next_;
  }
  // Backtracks to the nearest ancestor frame that still has unvisited
  // dominated blocks, or NULL if none remains.
  // NOTE: the TRACE_GVN_2 invocation below has no trailing semicolon; it
  // relies on the macro expanding to a complete if-statement.
  GvnBasicBlockState* pop() {
    GvnBasicBlockState* result = previous_;
    while (result != NULL && result->is_done()) {
      TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
                  block()->block_id(),
                  previous_->block()->block_id())
      result = result->previous_;
    }
    return result;
  }

  GvnBasicBlockState* previous_;
  GvnBasicBlockState* next_;
  HBasicBlock* block_;
  HValueMap* map_;
  HSideEffectMap dominators_;
  int dominated_index_;  // Index of the last dominated block handed out.
  int length_;           // Number of dominated blocks of block_.
};
751
// This is a recursive traversal of the dominator tree but it has been turned
// into a loop to avoid stack overflows.
// The logical "stack frames" of the recursion are kept in a list of
// GvnBasicBlockState instances.
void HGlobalValueNumberingPhase::AnalyzeGraph() {
  HBasicBlock* entry_block = graph()->entry_block();
  HValueMap* entry_map = new(zone()) HValueMap(zone());
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HValueMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header kill everything killed by the loop.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    HInstruction* instr = block->first();
    while (instr != NULL) {
      // Capture the successor first: instr may be deleted below.
      HInstruction* next = instr->next();
      GVNFlagSet flags = instr->ChangesFlags();
      if (!flags.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
        map->Kill(flags);
        dominators->Store(flags, instr);
        TRACE_GVN_2("Instruction %d %s\n", instr->id(),
                    *GetGVNFlagsString(flags));
      }
      if (instr->CheckFlag(HValue::kUseGVN)) {
        ASSERT(!instr->HasObservableSideEffects());
        // Replace the instruction with an equivalent one seen earlier on a
        // dominating path, if any; otherwise remember it for later reuse.
        HValue* other = map->Lookup(instr);
        if (other != NULL) {
          ASSERT(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          map->Add(instr, zone());
        }
      }
      // Only instructions still linked (i.e. not just deleted) can record
      // their side-effect dominators.
      if (instr->IsLinked() &&
          instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
          GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(depends_on_flag) &&
              (other != NULL)) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            instr->SetSideEffectDominator(changes_flag, other);
          }
        }
      }
      instr = next;
    }

    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(),
                                                  &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HValueMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block.  We don't have to traverse these paths if the
      // value map and the dominators list is already empty.  If the range
      // of block ids (block_id, dominated_id) is empty there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        GVNFlagSet side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}
854
855} } // namespace v8::internal