// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/hydrogen-bce.h"

namespace v8 {
namespace internal {


// We try to "factor up" HBoundsCheck instructions towards the root of the
// dominator tree.
// For now we handle checks where the index is of the form "exp + int32value".
// If in the dominator tree we check "exp + v1" and later (in a dominated
// block) "exp + v2", then if v2 <= v1 we can safely remove the second check,
// and if v2 > v1 we can use v2 in the first check and again remove the second.
// To do so we keep a dictionary of all checks, where the key is the pair
// "exp, length".
// The class BoundsCheckKey represents this key.
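// For example (purely illustrative): a dominating check
//   CheckBounds(i + 1, length)
// and a dominated check
//   CheckBounds(i + 3, length)
// both map to the key (i, length), with offsets 1 and 3. Since 3 > 1, the
// dominating check can be tightened to cover "i + 3" and the dominated check
// can be removed.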
class BoundsCheckKey : public ZoneObject {
 public:
  HValue* IndexBase() const { return index_base_; }
  HValue* Length() const { return length_; }

  uint32_t Hash() {
    return static_cast<uint32_t>(index_base_->Hashcode() ^ length_->Hashcode());
  }

  static BoundsCheckKey* Create(Zone* zone,
                                HBoundsCheck* check,
                                int32_t* offset) {
    if (!check->index()->representation().IsSmiOrInteger32()) return NULL;

    HValue* index_base = NULL;
    HConstant* constant = NULL;
    bool is_sub = false;

    if (check->index()->IsAdd()) {
      HAdd* index = HAdd::cast(check->index());
      if (index->left()->IsConstant()) {
        constant = HConstant::cast(index->left());
        index_base = index->right();
      } else if (index->right()->IsConstant()) {
        constant = HConstant::cast(index->right());
        index_base = index->left();
      }
    } else if (check->index()->IsSub()) {
      HSub* index = HSub::cast(check->index());
      is_sub = true;
      if (index->right()->IsConstant()) {
        constant = HConstant::cast(index->right());
        index_base = index->left();
      }
    } else if (check->index()->IsConstant()) {
      index_base = check->block()->graph()->GetConstant0();
      constant = HConstant::cast(check->index());
    }

    if (constant != NULL && constant->HasInteger32Value() &&
        constant->Integer32Value() != kMinInt) {
      *offset = is_sub ? - constant->Integer32Value()
                       : constant->Integer32Value();
    } else {
      *offset = 0;
      index_base = check->index();
    }

    return new(zone) BoundsCheckKey(index_base, check->length());
  }

 private:
  BoundsCheckKey(HValue* index_base, HValue* length)
      : index_base_(index_base),
        length_(length) { }

  HValue* index_base_;
  HValue* length_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckKey);
};


// Data about each HBoundsCheck that can be eliminated or moved.
// It is the "value" in the dictionary indexed by "base-index, length"
// (the key is BoundsCheckKey).
// We scan the code with a dominator tree traversal.
// While traversing the dominator tree we keep a stack (implemented as a
// singly linked list) of "data" for each basic block that contains a
// relevant check with the same key (the dictionary holds the head of the
// list).
// We also keep all the "data" created for a given basic block in a list, and
// use it to "clean up" the dictionary when backtracking in the dominator tree
// traversal.
// This way each dictionary entry always points directly to the check that
// dominates the code currently being examined.
// We also track the current "offset" of the index expression and use it to
// decide if a check is already "covered" (so it can be removed) or not.
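// For example (purely illustrative): if block B1 contains a check with key
// (i, length) and offset 2 and dominates block B2, then while B2 is being
// processed the dictionary entry for (i, length) points at B1's data, so a
// check on "i + 1" in B2 is recognized as covered and deleted. When the
// traversal backtracks past B1, the entry is cleaned up again.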
class BoundsCheckBbData: public ZoneObject {
 public:
  BoundsCheckKey* Key() const { return key_; }
  int32_t LowerOffset() const { return lower_offset_; }
  int32_t UpperOffset() const { return upper_offset_; }
  HBasicBlock* BasicBlock() const { return basic_block_; }
  HBoundsCheck* LowerCheck() const { return lower_check_; }
  HBoundsCheck* UpperCheck() const { return upper_check_; }
  BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
  BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }

  bool OffsetIsCovered(int32_t offset) const {
    return offset >= LowerOffset() && offset <= UpperOffset();
  }

  bool HasSingleCheck() { return lower_check_ == upper_check_; }

  void UpdateUpperOffsets(HBoundsCheck* check, int32_t offset) {
    BoundsCheckBbData* data = FatherInDominatorTree();
    while (data != NULL && data->UpperCheck() == check) {
      DCHECK(data->upper_offset_ < offset);
      data->upper_offset_ = offset;
      data = data->FatherInDominatorTree();
    }
  }

  void UpdateLowerOffsets(HBoundsCheck* check, int32_t offset) {
    BoundsCheckBbData* data = FatherInDominatorTree();
    while (data != NULL && data->LowerCheck() == check) {
      DCHECK(data->lower_offset_ > offset);
      data->lower_offset_ = offset;
      data = data->FatherInDominatorTree();
    }
  }

  // The goal of this method is to modify either upper_offset_ or
  // lower_offset_ so that new_offset is also covered (the covered
  // range grows).
  //
  // The precondition is that new_check follows UpperCheck() and
  // LowerCheck() in the same basic block, and that new_offset is not
  // covered (otherwise we could simply remove new_check).
  //
  // If HasSingleCheck() is true then new_check is added as "second check"
  // (either upper or lower; note that HasSingleCheck() becomes false).
  // Otherwise one of the current checks is modified so that it also covers
  // new_offset, and new_check is removed.
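  //
  // For example (purely illustrative): starting from a single check on
  // "i + 1" (lower_offset_ == upper_offset_ == 1), covering a new check on
  // "i + 3" keeps the new check as the upper check and widens the range to
  // [1, 3]; covering a further check on "i + 5" then tightens the existing
  // upper check to offset 5 and deletes that further check.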
  void CoverCheck(HBoundsCheck* new_check,
                  int32_t new_offset) {
    DCHECK(new_check->index()->representation().IsSmiOrInteger32());
    bool keep_new_check = false;

    if (new_offset > upper_offset_) {
      upper_offset_ = new_offset;
      if (HasSingleCheck()) {
        keep_new_check = true;
        upper_check_ = new_check;
      } else {
        TightenCheck(upper_check_, new_check, new_offset);
        UpdateUpperOffsets(upper_check_, upper_offset_);
      }
    } else if (new_offset < lower_offset_) {
      lower_offset_ = new_offset;
      if (HasSingleCheck()) {
        keep_new_check = true;
        lower_check_ = new_check;
      } else {
        TightenCheck(lower_check_, new_check, new_offset);
        UpdateLowerOffsets(lower_check_, lower_offset_);
      }
    } else {
      // Should never have called CoverCheck() in this case.
      UNREACHABLE();
    }

    if (!keep_new_check) {
      if (FLAG_trace_bce) {
        base::OS::Print("Eliminating check #%d after tightening\n",
                        new_check->id());
      }
      new_check->block()->graph()->isolate()->counters()->
          bounds_checks_eliminated()->Increment();
      new_check->DeleteAndReplaceWith(new_check->ActualValue());
    } else {
      HBoundsCheck* first_check = new_check == lower_check_ ? upper_check_
                                                            : lower_check_;
      if (FLAG_trace_bce) {
        base::OS::Print("Moving second check #%d after first check #%d\n",
                        new_check->id(), first_check->id());
      }
      // The length is guaranteed to be live at first_check.
      DCHECK(new_check->length() == first_check->length());
      HInstruction* old_position = new_check->next();
      new_check->Unlink();
      new_check->InsertAfter(first_check);
      MoveIndexIfNecessary(new_check->index(), new_check, old_position);
    }
  }

  BoundsCheckBbData(BoundsCheckKey* key,
                    int32_t lower_offset,
                    int32_t upper_offset,
                    HBasicBlock* bb,
                    HBoundsCheck* lower_check,
                    HBoundsCheck* upper_check,
                    BoundsCheckBbData* next_in_bb,
                    BoundsCheckBbData* father_in_dt)
      : key_(key),
        lower_offset_(lower_offset),
        upper_offset_(upper_offset),
        basic_block_(bb),
        lower_check_(lower_check),
        upper_check_(upper_check),
        next_in_bb_(next_in_bb),
        father_in_dt_(father_in_dt) { }

 private:
  BoundsCheckKey* key_;
  int32_t lower_offset_;
  int32_t upper_offset_;
  HBasicBlock* basic_block_;
  HBoundsCheck* lower_check_;
  HBoundsCheck* upper_check_;
  BoundsCheckBbData* next_in_bb_;
  BoundsCheckBbData* father_in_dt_;

  void MoveIndexIfNecessary(HValue* index_raw,
                            HBoundsCheck* insert_before,
                            HInstruction* end_of_scan_range) {
    // index_raw can be HAdd(index_base, offset), HSub(index_base, offset),
    // HConstant(offset) or index_base directly.
    // In the latter case, no need to move anything.
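    // The backwards scan below (from end_of_scan_range towards insert_before)
    // detects operands of the index that are defined after the position the
    // check was hoisted to; such operands would be used before their
    // definition, so they are moved in front of the check as well.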
    if (index_raw->IsAdd() || index_raw->IsSub()) {
      HArithmeticBinaryOperation* index =
          HArithmeticBinaryOperation::cast(index_raw);
      HValue* left_input = index->left();
      HValue* right_input = index->right();
      HValue* context = index->context();
      bool must_move_index = false;
      bool must_move_left_input = false;
      bool must_move_right_input = false;
      bool must_move_context = false;
      for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
        if (cursor == left_input) must_move_left_input = true;
        if (cursor == right_input) must_move_right_input = true;
        if (cursor == context) must_move_context = true;
        if (cursor == index) must_move_index = true;
        if (cursor->previous() == NULL) {
          cursor = cursor->block()->dominator()->end();
        } else {
          cursor = cursor->previous();
        }
      }
      if (must_move_index) {
        index->Unlink();
        index->InsertBefore(insert_before);
      }
      // The BCE algorithm only selects mergeable bounds checks that share
      // the same "index_base", so we'll only ever have to move constants.
      if (must_move_left_input) {
        HConstant::cast(left_input)->Unlink();
        HConstant::cast(left_input)->InsertBefore(index);
      }
      if (must_move_right_input) {
        HConstant::cast(right_input)->Unlink();
        HConstant::cast(right_input)->InsertBefore(index);
      }
      if (must_move_context) {
        // Contexts are always constants.
        HConstant::cast(context)->Unlink();
        HConstant::cast(context)->InsertBefore(index);
      }
    } else if (index_raw->IsConstant()) {
      HConstant* index = HConstant::cast(index_raw);
      bool must_move = false;
      for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
        if (cursor == index) must_move = true;
        if (cursor->previous() == NULL) {
          cursor = cursor->block()->dominator()->end();
        } else {
          cursor = cursor->previous();
        }
      }
      if (must_move) {
        index->Unlink();
        index->InsertBefore(insert_before);
      }
    }
  }

  void TightenCheck(HBoundsCheck* original_check,
                    HBoundsCheck* tighter_check,
                    int32_t new_offset) {
    DCHECK(original_check->length() == tighter_check->length());
    MoveIndexIfNecessary(tighter_check->index(), original_check, tighter_check);
    original_check->ReplaceAllUsesWith(original_check->index());
    original_check->SetOperandAt(0, tighter_check->index());
    if (FLAG_trace_bce) {
      base::OS::Print("Tightened check #%d with offset %d from #%d\n",
                      original_check->id(), new_offset, tighter_check->id());
    }
  }

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckBbData);
};


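// The table maps a BoundsCheckKey to the BoundsCheckBbData of the closest
// dominating block that checked that key. Two keys match only when both the
// index base and the length are the very same HValue instances.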
static bool BoundsCheckKeyMatch(void* key1, void* key2) {
  BoundsCheckKey* k1 = static_cast<BoundsCheckKey*>(key1);
  BoundsCheckKey* k2 = static_cast<BoundsCheckKey*>(key2);
  return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
}


BoundsCheckTable::BoundsCheckTable(Zone* zone)
    : ZoneHashMap(BoundsCheckKeyMatch, ZoneHashMap::kDefaultHashMapCapacity,
                  ZoneAllocationPolicy(zone)) { }


BoundsCheckBbData** BoundsCheckTable::LookupOrInsert(BoundsCheckKey* key,
                                                     Zone* zone) {
  return reinterpret_cast<BoundsCheckBbData**>(
      &(ZoneHashMap::LookupOrInsert(key, key->Hash(),
                                    ZoneAllocationPolicy(zone))->value));
}


void BoundsCheckTable::Insert(BoundsCheckKey* key,
                              BoundsCheckBbData* data,
                              Zone* zone) {
  ZoneHashMap::LookupOrInsert(key, key->Hash(), ZoneAllocationPolicy(zone))
      ->value = data;
}


void BoundsCheckTable::Delete(BoundsCheckKey* key) {
  Remove(key, key->Hash());
}


class HBoundsCheckEliminationState {
 public:
  HBasicBlock* block_;
  BoundsCheckBbData* bb_data_list_;
  int index_;
};


// Eliminates checks in bb and recursively in the dominated blocks.
// Also replaces the results of check instructions with the original value, if
// the result is used. This is safe now, since we don't do code motion after
// this point. It enables better register allocation since the value produced
// by check instructions is really a copy of the original value.
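// The dominator tree is walked iteratively with an explicit stack of
// HBoundsCheckEliminationState entries rather than by native recursion: each
// block is pre-processed when it is pushed and post-processed once all of the
// blocks it dominates have been handled.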
void HBoundsCheckEliminationPhase::EliminateRedundantBoundsChecks(
    HBasicBlock* entry) {
  // Allocate the stack.
  HBoundsCheckEliminationState* stack =
      zone()->NewArray<HBoundsCheckEliminationState>(graph()->blocks()->length());

  // Explicitly push the entry block.
  stack[0].block_ = entry;
  stack[0].bb_data_list_ = PreProcessBlock(entry);
  stack[0].index_ = 0;
  int stack_depth = 1;

  // Implement depth-first traversal with a stack.
  while (stack_depth > 0) {
    int current = stack_depth - 1;
    HBoundsCheckEliminationState* state = &stack[current];
    const ZoneList<HBasicBlock*>* children = state->block_->dominated_blocks();

    if (state->index_ < children->length()) {
      // Recursively visit child blocks.
      HBasicBlock* child = children->at(state->index_++);
      int next = stack_depth++;
      stack[next].block_ = child;
      stack[next].bb_data_list_ = PreProcessBlock(child);
      stack[next].index_ = 0;
    } else {
      // Finished with all children; post-process the block.
      PostProcessBlock(state->block_, state->bb_data_list_);
      stack_depth--;
    }
  }
}


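// For each bounds check in bb there are four cases: no data exists yet for
// its key (record fresh data for this block), the offset is already covered
// by a dominating check (delete the check), the existing data belongs to bb
// itself (widen it via CoverCheck()), or, under optimistic LICM / loop
// successor domination, new data with a widened range is recorded and chained
// to the dominating entry.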
BoundsCheckBbData* HBoundsCheckEliminationPhase::PreProcessBlock(
    HBasicBlock* bb) {
  BoundsCheckBbData* bb_data_list = NULL;

  for (HInstructionIterator it(bb); !it.Done(); it.Advance()) {
    HInstruction* i = it.Current();
    if (!i->IsBoundsCheck()) continue;

    HBoundsCheck* check = HBoundsCheck::cast(i);
    int32_t offset = 0;
    BoundsCheckKey* key =
        BoundsCheckKey::Create(zone(), check, &offset);
    if (key == NULL) continue;
    BoundsCheckBbData** data_p = table_.LookupOrInsert(key, zone());
    BoundsCheckBbData* data = *data_p;
    if (data == NULL) {
      bb_data_list = new(zone()) BoundsCheckBbData(key,
                                                   offset,
                                                   offset,
                                                   bb,
                                                   check,
                                                   check,
                                                   bb_data_list,
                                                   NULL);
      *data_p = bb_data_list;
      if (FLAG_trace_bce) {
        base::OS::Print("Fresh bounds check data for block #%d: [%d]\n",
                        bb->block_id(), offset);
      }
    } else if (data->OffsetIsCovered(offset)) {
      bb->graph()->isolate()->counters()->
          bounds_checks_eliminated()->Increment();
      if (FLAG_trace_bce) {
        base::OS::Print("Eliminating bounds check #%d, offset %d is covered\n",
                        check->id(), offset);
      }
      check->DeleteAndReplaceWith(check->ActualValue());
    } else if (data->BasicBlock() == bb) {
      // TODO(jkummerow): I think the following logic would be preferable:
      // if (data->Basicblock() == bb ||
      //     graph()->use_optimistic_licm() ||
      //     bb->IsLoopSuccessorDominator()) {
      //   data->CoverCheck(check, offset)
      // } else {
      //   /* add pristine BCBbData like in (data == NULL) case above */
      // }
      // Even better would be: distinguish between read-only dominator-imposed
      // knowledge and modifiable upper/lower checks.
      // What happens currently is that the first bounds check in a dominated
      // block will stay around while any further checks are hoisted out,
      // which doesn't make sense. Investigate/fix this in a future CL.
      data->CoverCheck(check, offset);
    } else if (graph()->use_optimistic_licm() ||
               bb->IsLoopSuccessorDominator()) {
      int32_t new_lower_offset = offset < data->LowerOffset()
          ? offset
          : data->LowerOffset();
      int32_t new_upper_offset = offset > data->UpperOffset()
          ? offset
          : data->UpperOffset();
      bb_data_list = new(zone()) BoundsCheckBbData(key,
                                                   new_lower_offset,
                                                   new_upper_offset,
                                                   bb,
                                                   data->LowerCheck(),
                                                   data->UpperCheck(),
                                                   bb_data_list,
                                                   data);
      if (FLAG_trace_bce) {
        base::OS::Print("Updated bounds check data for block #%d: [%d - %d]\n",
                        bb->block_id(), new_lower_offset, new_upper_offset);
      }
      table_.Insert(key, bb_data_list, zone());
    }
  }

  return bb_data_list;
}


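// When the traversal backtracks past a block, every table entry the block
// created or shadowed is popped: it either reverts to the data of the
// dominating block or is removed entirely.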
void HBoundsCheckEliminationPhase::PostProcessBlock(
    HBasicBlock* block, BoundsCheckBbData* data) {
  while (data != NULL) {
    if (data->FatherInDominatorTree()) {
      table_.Insert(data->Key(), data->FatherInDominatorTree(), zone());
    } else {
      table_.Delete(data->Key());
    }
    data = data->NextInBasicBlock();
  }
}

}  // namespace internal
}  // namespace v8