jkummerow@chromium.org | c118402 | 2013-05-28 16:58:15 +0000 | [diff] [blame] | 1 | // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 | // Redistribution and use in source and binary forms, with or without |
| 3 | // modification, are permitted provided that the following conditions are |
| 4 | // met: |
| 5 | // |
| 6 | // * Redistributions of source code must retain the above copyright |
| 7 | // notice, this list of conditions and the following disclaimer. |
| 8 | // * Redistributions in binary form must reproduce the above |
| 9 | // copyright notice, this list of conditions and the following |
| 10 | // disclaimer in the documentation and/or other materials provided |
| 11 | // with the distribution. |
| 12 | // * Neither the name of Google Inc. nor the names of its |
| 13 | // contributors may be used to endorse or promote products derived |
| 14 | // from this software without specific prior written permission. |
| 15 | // |
| 16 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 | // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 | // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 | // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 | // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 | // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 | // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 | // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | |
| 28 | #include "hydrogen.h" |
| 29 | #include "hydrogen-gvn.h" |
| 30 | #include "v8.h" |
| 31 | |
| 32 | namespace v8 { |
| 33 | namespace internal { |
| 34 | |
// A hash table mapping HValue hashcodes to HValue* instances; GVN uses it to
// find a previously computed, equivalent value for an instruction.  Hash
// collisions are chained through index-linked lists kept in a separate side
// array (lists_); free slots of that array form a free list.  The union of
// the GVN flags of all stored values (present_flags_) lets Kill() return
// early when nothing stored can be affected.
class HValueMap: public ZoneObject {
 public:
  explicit HValueMap(Zone* zone)
      : array_size_(0),
        lists_size_(0),
        count_(0),
        present_flags_(0),
        array_(NULL),
        lists_(NULL),
        free_list_head_(kNil) {
    // Allocate the collision-list storage before the primary array; Resize()
    // assumes lists_ already exists.
    ResizeLists(kInitialSize, zone);
    Resize(kInitialSize, zone);
  }

  // Removes every stored value that depends on any of the given "changes"
  // flags (after converting them to their "depends on" counterparts).
  void Kill(GVNFlagSet flags);

  // Inserts |value| and records its GVN flags in present_flags_.
  void Add(HValue* value, Zone* zone) {
    present_flags_.Add(value->gvn_flags());
    Insert(value, zone);
  }

  // Returns a stored value that Equals() |value|, or NULL.
  HValue* Lookup(HValue* value) const;

  // Deep-copies the map (both backing arrays) into |zone|.
  HValueMap* Copy(Zone* zone) const {
    return new(zone) HValueMap(zone, this);
  }

  bool IsEmpty() const { return count_ == 0; }

 private:
  // A linked list of HValue* values. Stored in arrays.
  struct HValueMapListElement {
    HValue* value;
    int next;  // Index in the array of the next list element.
  };
  static const int kNil = -1;  // The end of a linked list

  // Must be a power of 2.  Bound() relies on this to mask hashcodes.
  static const int kInitialSize = 16;

  // Copy constructor backing Copy().
  HValueMap(Zone* zone, const HValueMap* other);

  void Resize(int new_size, Zone* zone);
  void ResizeLists(int new_size, Zone* zone);
  void Insert(HValue* value, Zone* zone);
  // Maps a hashcode to a primary-array index (array_size_ is a power of 2).
  uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }

  int array_size_;
  int lists_size_;
  int count_;  // The number of values stored in the HValueMap.
  GVNFlagSet present_flags_;  // All flags that are in any value in the
                              // HValueMap.
  HValueMapListElement* array_;  // Primary store - contains the first value
      // with a given hash.  Colliding elements are stored in linked lists.
  HValueMapListElement* lists_;  // The linked lists containing hash collisions.
  int free_list_head_;  // Unused elements in lists_ are on the free list.
};
| 92 | |
| 93 | |
// A fixed-size table mapping each tracked side-effect kind to the most recent
// instruction that produced it (or NULL).  Used by GVN to find the dominating
// side-effect producer for an instruction.
class HSideEffectMap BASE_EMBEDDED {
 public:
  HSideEffectMap();
  explicit HSideEffectMap(HSideEffectMap* other);
  HSideEffectMap& operator= (const HSideEffectMap& other);

  // Clears every entry whose "changes" flag is contained in |flags|.
  void Kill(GVNFlagSet flags);

  // Records |instr| as the latest producer for every tracked flag in |flags|.
  void Store(GVNFlagSet flags, HInstruction* instr);

  bool IsEmpty() const { return count_ == 0; }

  inline HInstruction* operator[](int i) const {
    ASSERT(0 <= i);
    ASSERT(i < kNumberOfTrackedSideEffects);
    return data_[i];
  }
  inline HInstruction* at(int i) const { return operator[](i); }

 private:
  int count_;  // Number of non-NULL entries in data_.
  HInstruction* data_[kNumberOfTrackedSideEffects];
};
| 117 | |
| 118 | |
| 119 | void TraceGVN(const char* msg, ...) { |
| 120 | va_list arguments; |
| 121 | va_start(arguments, msg); |
| 122 | OS::VPrint(msg, arguments); |
| 123 | va_end(arguments); |
| 124 | } |
| 125 | |
mstarzinger@chromium.org | e0e1b0d | 2013-07-08 08:38:06 +0000 | [diff] [blame] | 126 | |
// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments
// when --trace-gvn is off.
//
// The inverted "if (!flag) {} else {...}" form keeps the macro from
// capturing a following "else" at the call site (dangling-else hazard of a
// bare "if (flag) {...}"), and it works both with and without a trailing
// semicolon — both styles appear in this file.
#define TRACE_GVN_1(msg, a1)                    \
  if (!FLAG_trace_gvn) {                        \
  } else {                                      \
    TraceGVN(msg, a1);                          \
  }

#define TRACE_GVN_2(msg, a1, a2)                \
  if (!FLAG_trace_gvn) {                        \
  } else {                                      \
    TraceGVN(msg, a1, a2);                      \
  }

#define TRACE_GVN_3(msg, a1, a2, a3)            \
  if (!FLAG_trace_gvn) {                        \
  } else {                                      \
    TraceGVN(msg, a1, a2, a3);                  \
  }

#define TRACE_GVN_4(msg, a1, a2, a3, a4)        \
  if (!FLAG_trace_gvn) {                        \
  } else {                                      \
    TraceGVN(msg, a1, a2, a3, a4);              \
  }

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)    \
  if (!FLAG_trace_gvn) {                        \
  } else {                                      \
    TraceGVN(msg, a1, a2, a3, a4, a5);          \
  }
| 153 | |
| 154 | |
// Copy constructor backing HValueMap::Copy(): duplicates both backing arrays
// into |zone| so the copy can be mutated independently of |other|.
HValueMap::HValueMap(Zone* zone, const HValueMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_flags_(other->present_flags_),
      array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_) {
  // Elements contain only an HValue* and an int index, so a shallow byte
  // copy preserves the full structure, including the embedded free list.
  OS::MemCopy(
      array_, other->array_, array_size_ * sizeof(HValueMapListElement));
  OS::MemCopy(
      lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
}
| 168 | |
| 169 | |
// Removes every stored value whose flags intersect the "depends on" versions
// of |flags|, rebuilding present_flags_ from the survivors.  Bails out
// immediately if nothing stored can be affected.
void HValueMap::Kill(GVNFlagSet flags) {
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
  if (!present_flags_.ContainsAnyOf(depends_flags)) return;
  // present_flags_ is recomputed below from the values that are kept.
  present_flags_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HValue* value = array_[i].value;
    if (value != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        HValue* value = lists_[current].value;
        if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
          // Drop it: return the list node to the free list.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it: prepend to the rebuilt (reversed) collision list.
          lists_[current].next = kept;
          kept = current;
          present_flags_.Add(value->gvn_flags());
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      value = array_[i].value;
      if (value->gvn_flags().ContainsAnyOf(depends_flags)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          array_[i].value = NULL;
        } else {
          // Promote the first collision-list entry into the primary slot and
          // recycle its list node.
          array_[i].value = lists_[head].value;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_flags_.Add(value->gvn_flags());  // Keep it.
      }
    }
  }
}
| 216 | |
| 217 | |
| 218 | HValue* HValueMap::Lookup(HValue* value) const { |
| 219 | uint32_t hash = static_cast<uint32_t>(value->Hashcode()); |
| 220 | uint32_t pos = Bound(hash); |
| 221 | if (array_[pos].value != NULL) { |
| 222 | if (array_[pos].value->Equals(value)) return array_[pos].value; |
| 223 | int next = array_[pos].next; |
| 224 | while (next != kNil) { |
| 225 | if (lists_[next].value->Equals(value)) return lists_[next].value; |
| 226 | next = lists_[next].next; |
| 227 | } |
| 228 | } |
| 229 | return NULL; |
| 230 | } |
| 231 | |
| 232 | |
// Grows the primary array to |new_size| and rehashes every stored value into
// it.  The collision-list storage (lists_) is reused in place: rehashing
// cannot create more collisions than before.
void HValueMap::Resize(int new_size, Zone* zone) {
  ASSERT(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }

  HValueMapListElement* new_array =
      zone->NewArray<HValueMapListElement>(new_size);
  memset(new_array, 0, sizeof(HValueMapListElement) * new_size);

  HValueMapListElement* old_array = array_;
  int old_size = array_size_;

  // Insert() below re-increments count_ for every rehashed value; the old
  // count is kept only for the consistency check at the end.
  int old_count = count_;
  count_ = 0;
  // Do not modify present_flags_.  It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].value != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          // Re-insert the value, then return its old list node to the free
          // list (Insert may immediately reuse it).
          Insert(lists_[current].value, zone);
          int next = lists_[current].next;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored value.
        Insert(old_array[i].value, zone);
      }
    }
  }
  USE(old_count);
  ASSERT(count_ == old_count);
}
| 276 | |
| 277 | |
| 278 | void HValueMap::ResizeLists(int new_size, Zone* zone) { |
| 279 | ASSERT(new_size > lists_size_); |
| 280 | |
| 281 | HValueMapListElement* new_lists = |
| 282 | zone->NewArray<HValueMapListElement>(new_size); |
| 283 | memset(new_lists, 0, sizeof(HValueMapListElement) * new_size); |
| 284 | |
| 285 | HValueMapListElement* old_lists = lists_; |
| 286 | int old_size = lists_size_; |
| 287 | |
| 288 | lists_size_ = new_size; |
| 289 | lists_ = new_lists; |
| 290 | |
| 291 | if (old_lists != NULL) { |
| 292 | OS::MemCopy(lists_, old_lists, old_size * sizeof(HValueMapListElement)); |
| 293 | } |
| 294 | for (int i = old_size; i < lists_size_; ++i) { |
| 295 | lists_[i].next = free_list_head_; |
| 296 | free_list_head_ = i; |
| 297 | } |
| 298 | } |
| 299 | |
| 300 | |
// Inserts |value| into the table, growing the primary array when it reaches
// half capacity and taking a collision-list node from the free list when the
// target bucket is occupied.
void HValueMap::Insert(HValue* value, Zone* zone) {
  ASSERT(value != NULL);
  // Resizing when half of the hashtable is filled up.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  ASSERT(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
  if (array_[pos].value == NULL) {
    // Empty bucket: store directly in the primary array.
    array_[pos].value = value;
    array_[pos].next = kNil;
  } else {
    // Collision: take a node from the free list (growing lists_ if empty)
    // and prepend it to the bucket's chain.
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1, zone);
    }
    int new_element_pos = free_list_head_;
    ASSERT(new_element_pos != kNil);
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].value = value;
    lists_[new_element_pos].next = array_[pos].next;
    ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
    array_[pos].next = new_element_pos;
  }
}
| 324 | |
| 325 | |
| 326 | HSideEffectMap::HSideEffectMap() : count_(0) { |
| 327 | memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize); |
| 328 | } |
| 329 | |
| 330 | |
// Copy-constructs from |other|: count_ is taken directly from |other| in the
// initializer list, and the assignment below fills in data_.
HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  *this = *other;  // Calls operator=.
}
| 334 | |
| 335 | |
| 336 | HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) { |
| 337 | if (this != &other) { |
| 338 | OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize); |
| 339 | } |
| 340 | return *this; |
| 341 | } |
| 342 | |
mstarzinger@chromium.org | e0e1b0d | 2013-07-08 08:38:06 +0000 | [diff] [blame] | 343 | |
jkummerow@chromium.org | c118402 | 2013-05-28 16:58:15 +0000 | [diff] [blame] | 344 | void HSideEffectMap::Kill(GVNFlagSet flags) { |
| 345 | for (int i = 0; i < kNumberOfTrackedSideEffects; i++) { |
| 346 | GVNFlag changes_flag = HValue::ChangesFlagFromInt(i); |
| 347 | if (flags.Contains(changes_flag)) { |
| 348 | if (data_[i] != NULL) count_--; |
| 349 | data_[i] = NULL; |
| 350 | } |
| 351 | } |
| 352 | } |
| 353 | |
| 354 | |
| 355 | void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) { |
| 356 | for (int i = 0; i < kNumberOfTrackedSideEffects; i++) { |
| 357 | GVNFlag changes_flag = HValue::ChangesFlagFromInt(i); |
| 358 | if (flags.Contains(changes_flag)) { |
| 359 | if (data_[i] == NULL) count_++; |
| 360 | data_[i] = instr; |
| 361 | } |
| 362 | } |
| 363 | } |
| 364 | |
| 365 | |
// Sets up per-block side-effect tracking sized to the graph's block count.
// GVN must not allocate handles, hence the assert.
HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
    : HPhase("H_Global value numbering", graph),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), zone()),
      loop_side_effects_(graph->blocks()->length(), zone()),
      visited_on_paths_(graph->blocks()->length(), zone()) {
  ASSERT(!AllowHandleAllocation::IsAllowed());
  // Pre-fill both lists with empty flag sets so they can be indexed by
  // block id right away.
  block_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(),
                               zone());
  loop_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(),
                              zone());
}
| 378 | |
// Runs one GVN pass: (re)computes per-block side effects, optionally hoists
// loop-invariant instructions, then eliminates redundant computations.
// May be invoked multiple times on the same graph.
void HGlobalValueNumberingPhase::Analyze() {
  removed_side_effects_ = false;
  ComputeBlockSideEffects();
  if (FLAG_loop_invariant_code_motion) {
    LoopInvariantCodeMotion();
  }
  AnalyzeGraph();
}
| 387 | |
| 388 | |
// Computes, for every block, the set of GVN "changes" flags produced by its
// instructions, and aggregates those per loop.  Blocks are visited in
// reverse id order so inner-loop totals exist before being folded into
// their parent loops.
void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
  // The Analyze phase of GVN can be called multiple times. Clear loop side
  // effects before computing them to erase the contents from previous Analyze
  // passes.
  for (int i = 0; i < loop_side_effects_.length(); ++i) {
    loop_side_effects_[i].RemoveAll();
  }
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph()->blocks()->at(i);
    int id = block->block_id();
    GVNFlagSet side_effects;
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instr = it.Current();
      side_effects.Add(instr->ChangesFlags());
      // Anything after an unconditional deopt never executes, so the
      // block's effects are discarded entirely.
      if (instr->IsDeoptimize()) {
        block_side_effects_[id].RemoveAll();
        side_effects.RemoveAll();
        break;
      }
    }
    block_side_effects_[id].Add(side_effects);

    // Loop headers are part of their loop.
    if (block->IsLoopHeader()) {
      loop_side_effects_[id].Add(side_effects);
    }

    // Propagate loop side effects upwards.
    if (block->HasParentLoopHeader()) {
      int header_id = block->parent_loop_header()->block_id();
      // For a nested loop header, fold in its whole (already computed)
      // loop total; for an ordinary block just its own effects.
      loop_side_effects_[header_id].Add(block->IsLoopHeader()
                                            ? loop_side_effects_[id]
                                            : side_effects);
    }
  }
}
| 426 | |
| 427 | |
| 428 | SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) { |
| 429 | char underlying_buffer[kLastFlag * 128]; |
| 430 | Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer)); |
| 431 | #if DEBUG |
| 432 | int offset = 0; |
| 433 | const char* separator = ""; |
| 434 | const char* comma = ", "; |
| 435 | buffer[0] = 0; |
| 436 | uint32_t set_depends_on = 0; |
| 437 | uint32_t set_changes = 0; |
| 438 | for (int bit = 0; bit < kLastFlag; ++bit) { |
| 439 | if ((flags.ToIntegral() & (1 << bit)) != 0) { |
| 440 | if (bit % 2 == 0) { |
| 441 | set_changes++; |
| 442 | } else { |
| 443 | set_depends_on++; |
| 444 | } |
| 445 | } |
| 446 | } |
| 447 | bool positive_changes = set_changes < (kLastFlag / 2); |
| 448 | bool positive_depends_on = set_depends_on < (kLastFlag / 2); |
| 449 | if (set_changes > 0) { |
| 450 | if (positive_changes) { |
| 451 | offset += OS::SNPrintF(buffer + offset, "changes ["); |
| 452 | } else { |
| 453 | offset += OS::SNPrintF(buffer + offset, "changes all except ["); |
| 454 | } |
| 455 | for (int bit = 0; bit < kLastFlag; ++bit) { |
| 456 | if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) { |
| 457 | switch (static_cast<GVNFlag>(bit)) { |
| 458 | #define DECLARE_FLAG(type) \ |
| 459 | case kChanges##type: \ |
| 460 | offset += OS::SNPrintF(buffer + offset, separator); \ |
| 461 | offset += OS::SNPrintF(buffer + offset, #type); \ |
| 462 | separator = comma; \ |
| 463 | break; |
| 464 | GVN_TRACKED_FLAG_LIST(DECLARE_FLAG) |
| 465 | GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG) |
| 466 | #undef DECLARE_FLAG |
| 467 | default: |
| 468 | break; |
| 469 | } |
| 470 | } |
| 471 | } |
| 472 | offset += OS::SNPrintF(buffer + offset, "]"); |
| 473 | } |
| 474 | if (set_depends_on > 0) { |
| 475 | separator = ""; |
| 476 | if (set_changes > 0) { |
| 477 | offset += OS::SNPrintF(buffer + offset, ", "); |
| 478 | } |
| 479 | if (positive_depends_on) { |
| 480 | offset += OS::SNPrintF(buffer + offset, "depends on ["); |
| 481 | } else { |
| 482 | offset += OS::SNPrintF(buffer + offset, "depends on all except ["); |
| 483 | } |
| 484 | for (int bit = 0; bit < kLastFlag; ++bit) { |
| 485 | if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) { |
| 486 | switch (static_cast<GVNFlag>(bit)) { |
| 487 | #define DECLARE_FLAG(type) \ |
| 488 | case kDependsOn##type: \ |
| 489 | offset += OS::SNPrintF(buffer + offset, separator); \ |
| 490 | offset += OS::SNPrintF(buffer + offset, #type); \ |
| 491 | separator = comma; \ |
| 492 | break; |
| 493 | GVN_TRACKED_FLAG_LIST(DECLARE_FLAG) |
| 494 | GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG) |
| 495 | #undef DECLARE_FLAG |
| 496 | default: |
| 497 | break; |
| 498 | } |
| 499 | } |
| 500 | } |
| 501 | offset += OS::SNPrintF(buffer + offset, "]"); |
| 502 | } |
| 503 | #else |
| 504 | OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral()); |
| 505 | #endif |
| 506 | size_t string_len = strlen(underlying_buffer) + 1; |
| 507 | ASSERT(string_len <= sizeof(underlying_buffer)); |
| 508 | char* result = new char[strlen(underlying_buffer) + 1]; |
| 509 | OS::MemCopy(result, underlying_buffer, string_len); |
| 510 | return SmartArrayPointer<char>(result); |
| 511 | } |
| 512 | |
| 513 | |
// For every loop (header found by scanning blocks in reverse id order),
// attempts to hoist invariant instructions from each block of the loop body
// into the loop pre-header.
void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph()->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    if (block->IsLoopHeader()) {
      GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
      TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
                  block->block_id(),
                  *GetGVNFlagsString(side_effects));

      // Accumulated across the loop body in first-iteration order; used by
      // ProcessLoopBlock to reason about first-trip effects.
      GVNFlagSet accumulated_first_time_depends;
      GVNFlagSet accumulated_first_time_changes;
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      // Loop bodies occupy the contiguous block-id range [header, last
      // back edge].
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects,
                         &accumulated_first_time_depends,
                         &accumulated_first_time_changes);
      }
    }
  }
}
| 536 | |
| 537 | |
// Examines each instruction of |block| (a member of the loop rooted at
// |loop_header|) and hoists it into the loop pre-header when it is loop
// invariant: it must not depend on anything the loop changes (|loop_kills|),
// all its operands must be defined before the pre-header, and ShouldMove()
// must permit it.  Instructions left in place accumulate their flags into
// the first-time depends/changes sets.
void HGlobalValueNumberingPhase::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    GVNFlagSet loop_kills,
    GVNFlagSet* first_time_depends,
    GVNFlagSet* first_time_changes) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
  TRACE_GVN_2("Loop invariant motion for B%d %s\n",
              block->block_id(),
              *GetGVNFlagsString(depends_flags));
  HInstruction* instr = block->first();
  while (instr != NULL) {
    // Capture the successor first: hoisting unlinks instr from this block.
    HInstruction* next = instr->next();
    bool hoisted = false;
    if (instr->CheckFlag(HValue::kUseGVN)) {
      TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
                  instr->id(),
                  instr->Mnemonic(),
                  *GetGVNFlagsString(instr->gvn_flags()),
                  *GetGVNFlagsString(loop_kills));
      bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
      // Without optimistic LICM, only hoist from blocks that dominate all
      // loop exits (so the instruction would have executed anyway).
      if (can_hoist && !graph()->use_optimistic_licm()) {
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          hoisted = true;
        }
      }
    }
    if (!hoisted) {
      // If an instruction is not hoisted, we have to account for its side
      // effects when hoisting later HTransitionElementsKind instructions.
      GVNFlagSet previous_depends = *first_time_depends;
      GVNFlagSet previous_changes = *first_time_changes;
      first_time_depends->Add(instr->DependsOnFlags());
      first_time_changes->Add(instr->ChangesFlags());
      if (!(previous_depends == *first_time_depends)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_depends));
      }
      if (!(previous_changes == *first_time_changes)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_changes));
      }
    }
    instr = next;
  }
}
| 601 | |
| 602 | |
mstarzinger@chromium.org | 1510d58 | 2013-06-28 14:00:48 +0000 | [diff] [blame] | 603 | bool HGlobalValueNumberingPhase::AllowCodeMotion() { |
jkummerow@chromium.org | c118402 | 2013-05-28 16:58:15 +0000 | [diff] [blame] | 604 | return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count; |
| 605 | } |
| 606 | |
| 607 | |
mstarzinger@chromium.org | 1510d58 | 2013-06-28 14:00:48 +0000 | [diff] [blame] | 608 | bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr, |
| 609 | HBasicBlock* loop_header) { |
jkummerow@chromium.org | c118402 | 2013-05-28 16:58:15 +0000 | [diff] [blame] | 610 | // If we've disabled code motion or we're in a block that unconditionally |
| 611 | // deoptimizes, don't move any instructions. |
| 612 | return AllowCodeMotion() && !instr->block()->IsDeoptimizing(); |
| 613 | } |
| 614 | |
| 615 | |
// Recursively gathers the side effects of every block lying on some path
// from |dominator| to |dominated| (exclusive of both endpoints), using block
// ids to bound the search and visited_on_paths_ to avoid revisiting blocks.
GVNFlagSet
HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  GVNFlagSet side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    // Only blocks strictly between the two ids can lie on such a path.
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        !visited_on_paths_.Contains(block->block_id())) {
      visited_on_paths_.Add(block->block_id());
      side_effects.Add(block_side_effects_[block->block_id()]);
      // A loop header on the path implies the whole loop may execute.
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}
| 636 | |
| 637 | |
| 638 | // Each instance of this class is like a "stack frame" for the recursive |
| 639 | // traversal of the dominator tree done during GVN (the stack is handled |
| 640 | // as a double linked list). |
| 641 | // We reuse frames when possible so the list length is limited by the depth |
| 642 | // of the dominator tree but this forces us to initialize each frame calling |
| 643 | // an explicit "Initialize" method instead of a using constructor. |
| 644 | class GvnBasicBlockState: public ZoneObject { |
| 645 | public: |
| 646 | static GvnBasicBlockState* CreateEntry(Zone* zone, |
| 647 | HBasicBlock* entry_block, |
| 648 | HValueMap* entry_map) { |
| 649 | return new(zone) |
| 650 | GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone); |
| 651 | } |
| 652 | |
| 653 | HBasicBlock* block() { return block_; } |
| 654 | HValueMap* map() { return map_; } |
| 655 | HSideEffectMap* dominators() { return &dominators_; } |
| 656 | |
| 657 | GvnBasicBlockState* next_in_dominator_tree_traversal( |
| 658 | Zone* zone, |
| 659 | HBasicBlock** dominator) { |
| 660 | // This assignment needs to happen before calling next_dominated() because |
| 661 | // that call can reuse "this" if we are at the last dominated block. |
| 662 | *dominator = block(); |
| 663 | GvnBasicBlockState* result = next_dominated(zone); |
| 664 | if (result == NULL) { |
| 665 | GvnBasicBlockState* dominator_state = pop(); |
| 666 | if (dominator_state != NULL) { |
| 667 | // This branch is guaranteed not to return NULL because pop() never |
| 668 | // returns a state where "is_done() == true". |
| 669 | *dominator = dominator_state->block(); |
| 670 | result = dominator_state->next_dominated(zone); |
| 671 | } else { |
| 672 | // Unnecessary (we are returning NULL) but done for cleanness. |
| 673 | *dominator = NULL; |
| 674 | } |
| 675 | } |
| 676 | return result; |
| 677 | } |
| 678 | |
| 679 | private: |
| 680 | void Initialize(HBasicBlock* block, |
| 681 | HValueMap* map, |
| 682 | HSideEffectMap* dominators, |
| 683 | bool copy_map, |
| 684 | Zone* zone) { |
| 685 | block_ = block; |
| 686 | map_ = copy_map ? map->Copy(zone) : map; |
| 687 | dominated_index_ = -1; |
| 688 | length_ = block->dominated_blocks()->length(); |
| 689 | if (dominators != NULL) { |
| 690 | dominators_ = *dominators; |
| 691 | } |
| 692 | } |
| 693 | bool is_done() { return dominated_index_ >= length_; } |
| 694 | |
| 695 | GvnBasicBlockState(GvnBasicBlockState* previous, |
| 696 | HBasicBlock* block, |
| 697 | HValueMap* map, |
| 698 | HSideEffectMap* dominators, |
| 699 | Zone* zone) |
| 700 | : previous_(previous), next_(NULL) { |
| 701 | Initialize(block, map, dominators, true, zone); |
| 702 | } |
| 703 | |
| 704 | GvnBasicBlockState* next_dominated(Zone* zone) { |
| 705 | dominated_index_++; |
| 706 | if (dominated_index_ == length_ - 1) { |
| 707 | // No need to copy the map for the last child in the dominator tree. |
| 708 | Initialize(block_->dominated_blocks()->at(dominated_index_), |
| 709 | map(), |
| 710 | dominators(), |
| 711 | false, |
| 712 | zone); |
| 713 | return this; |
| 714 | } else if (dominated_index_ < length_) { |
jkummerow@chromium.org | 1048047 | 2013-07-17 08:22:15 +0000 | [diff] [blame] | 715 | return push(zone, block_->dominated_blocks()->at(dominated_index_)); |
jkummerow@chromium.org | c118402 | 2013-05-28 16:58:15 +0000 | [diff] [blame] | 716 | } else { |
| 717 | return NULL; |
| 718 | } |
| 719 | } |
| 720 | |
jkummerow@chromium.org | 1048047 | 2013-07-17 08:22:15 +0000 | [diff] [blame] | 721 | GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) { |
jkummerow@chromium.org | c118402 | 2013-05-28 16:58:15 +0000 | [diff] [blame] | 722 | if (next_ == NULL) { |
| 723 | next_ = |
jkummerow@chromium.org | 1048047 | 2013-07-17 08:22:15 +0000 | [diff] [blame] | 724 | new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone); |
jkummerow@chromium.org | c118402 | 2013-05-28 16:58:15 +0000 | [diff] [blame] | 725 | } else { |
jkummerow@chromium.org | 1048047 | 2013-07-17 08:22:15 +0000 | [diff] [blame] | 726 | next_->Initialize(block, map(), dominators(), true, zone); |
jkummerow@chromium.org | c118402 | 2013-05-28 16:58:15 +0000 | [diff] [blame] | 727 | } |
| 728 | return next_; |
| 729 | } |
  // Unwinds to the closest ancestor state that still has unvisited
  // dominated blocks, or NULL when the whole traversal is finished.
  GvnBasicBlockState* pop() {
    GvnBasicBlockState* result = previous_;
    while (result != NULL && result->is_done()) {
      // NOTE(review): the trace prints previous_'s block id on every
      // iteration, even after |result| has moved further up the chain —
      // it looks like result->block() was intended for the second
      // argument; confirm against the TRACE_GVN_2 macro (trace-only, no
      // effect on the analysis). The macro also supplies the trailing
      // semicolon, hence none here.
      TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
                  block()->block_id(),
                  previous_->block()->block_id())
      result = result->previous_;
    }
    return result;
  }
| 740 | |
  // Links of the explicit "stack" of states that simulates the recursive
  // dominator-tree walk (next_ is kept so frames can be recycled by push()).
  GvnBasicBlockState* previous_;
  GvnBasicBlockState* next_;
  HBasicBlock* block_;         // Block this frame currently analyzes.
  HValueMap* map_;             // GVN value table valid while in block_.
  HSideEffectMap dominators_;  // Dominating instruction per tracked effect.
  int dominated_index_;        // Next child to visit; -1 before first use.
  int length_;                 // Number of blocks dominated by block_.
};
| 749 | |
mstarzinger@chromium.org | e0e1b0d | 2013-07-08 08:38:06 +0000 | [diff] [blame] | 750 | |
jkummerow@chromium.org | c118402 | 2013-05-28 16:58:15 +0000 | [diff] [blame] | 751 | // This is a recursive traversal of the dominator tree but it has been turned |
| 752 | // into a loop to avoid stack overflows. |
| 753 | // The logical "stack frames" of the recursion are kept in a list of |
| 754 | // GvnBasicBlockState instances. |
void HGlobalValueNumberingPhase::AnalyzeGraph() {
  // Iterative dominator-tree walk; GvnBasicBlockState objects act as the
  // "stack frames" of the logical recursion, starting from the entry block
  // with an empty value map.
  HBasicBlock* entry_block = graph()->entry_block();
  HValueMap* entry_map = new(zone()) HValueMap(zone());
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HValueMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header kill everything killed by the loop, since
    // values computed before the loop may be clobbered on the back edge.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
      dominators->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instr = it.Current();
      if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        // Tell the instruction which dominating instruction last changed
        // each tracked side effect it depends on (dominators->at(i) holds
        // the most recent such instruction stored during this walk).
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
          GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(depends_on_flag) &&
              (other != NULL)) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            instr->HandleSideEffectDominator(changes_flag, other);
          }
        }
      }
      // Instruction was unlinked during graph traversal; skip it.
      if (!instr->IsLinked()) continue;

      GVNFlagSet flags = instr->ChangesFlags();
      if (!flags.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
        map->Kill(flags);
        dominators->Store(flags, instr);
        TRACE_GVN_2("Instruction %d %s\n", instr->id(),
                    *GetGVNFlagsString(flags));
      }
      if (instr->CheckFlag(HValue::kUseGVN)) {
        ASSERT(!instr->HasObservableSideEffects());
        // Redundancy elimination proper: if an equal value is already in
        // the map, replace this instruction with it; otherwise record it
        // for later candidates.
        HValue* other = map->Lookup(instr);
        if (other != NULL) {
          ASSERT(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          map->Add(instr, zone());
        }
      }
    }

    // Advance the traversal; |next| starts from a (possibly shared) copy
    // of |current|'s map and dominators.
    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(),
                                                  &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HValueMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block. We don't have to traverse these paths if the
      // value map and the dominators list is already empty. If the range
      // of block ids (block_id, dominated_id) is empty there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        GVNFlagSet side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}
| 854 | |
| 855 | } } // namespace v8::internal |