blob: 5e4ad3714aee9d7176fdc81e5b783b097d311b8d [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/crankshaft/hydrogen-instructions.h"
6
7#include "src/base/bits.h"
8#include "src/base/safe_math.h"
9#include "src/crankshaft/hydrogen-infer-representation.h"
10#include "src/double.h"
11#include "src/elements.h"
12#include "src/factory.h"
13
14#if V8_TARGET_ARCH_IA32
15#include "src/crankshaft/ia32/lithium-ia32.h" // NOLINT
16#elif V8_TARGET_ARCH_X64
17#include "src/crankshaft/x64/lithium-x64.h" // NOLINT
18#elif V8_TARGET_ARCH_ARM64
19#include "src/crankshaft/arm64/lithium-arm64.h" // NOLINT
20#elif V8_TARGET_ARCH_ARM
21#include "src/crankshaft/arm/lithium-arm.h" // NOLINT
22#elif V8_TARGET_ARCH_PPC
23#include "src/crankshaft/ppc/lithium-ppc.h" // NOLINT
24#elif V8_TARGET_ARCH_MIPS
25#include "src/crankshaft/mips/lithium-mips.h" // NOLINT
26#elif V8_TARGET_ARCH_MIPS64
27#include "src/crankshaft/mips64/lithium-mips64.h" // NOLINT
Ben Murdochda12d292016-06-02 14:46:10 +010028#elif V8_TARGET_ARCH_S390
29#include "src/crankshaft/s390/lithium-s390.h" // NOLINT
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000030#elif V8_TARGET_ARCH_X87
31#include "src/crankshaft/x87/lithium-x87.h" // NOLINT
32#else
33#error Unsupported target architecture.
34#endif
35
36namespace v8 {
37namespace internal {
38
// For each concrete hydrogen instruction H<type>, generate the Lithium
// lowering entry point, which simply dispatches to the matching
// LChunkBuilder::Do<type> method.
#define DEFINE_COMPILE(type)                                        \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
    return builder->Do##type(this);                                 \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE
45
46
47Isolate* HValue::isolate() const {
48 DCHECK(block() != NULL);
49 return block()->isolate();
50}
51
52
53void HValue::AssumeRepresentation(Representation r) {
54 if (CheckFlag(kFlexibleRepresentation)) {
55 ChangeRepresentation(r);
56 // The representation of the value is dictated by type feedback and
57 // will not be changed later.
58 ClearFlag(kFlexibleRepresentation);
59 }
60}
61
62
63void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
64 DCHECK(CheckFlag(kFlexibleRepresentation));
65 Representation new_rep = RepresentationFromInputs();
66 UpdateRepresentation(new_rep, h_infer, "inputs");
67 new_rep = RepresentationFromUses();
68 UpdateRepresentation(new_rep, h_infer, "uses");
69 if (representation().IsSmi() && HasNonSmiUse()) {
70 UpdateRepresentation(
71 Representation::Integer32(), h_infer, "use requirements");
72 }
73}
74
75
76Representation HValue::RepresentationFromUses() {
77 if (HasNoUses()) return Representation::None();
78 Representation result = Representation::None();
79
80 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
81 HValue* use = it.value();
82 Representation rep = use->observed_input_representation(it.index());
83 result = result.generalize(rep);
84
85 if (FLAG_trace_representation) {
86 PrintF("#%d %s is used by #%d %s as %s%s\n",
87 id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
88 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
89 }
90 }
91 if (IsPhi()) {
92 result = result.generalize(
93 HPhi::cast(this)->representation_from_indirect_uses());
94 }
95
96 // External representations are dealt with separately.
97 return result.IsExternal() ? Representation::None() : result;
98}
99
100
101void HValue::UpdateRepresentation(Representation new_rep,
102 HInferRepresentationPhase* h_infer,
103 const char* reason) {
104 Representation r = representation();
105 if (new_rep.is_more_general_than(r)) {
106 if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
107 if (FLAG_trace_representation) {
108 PrintF("Changing #%d %s representation %s -> %s based on %s\n",
109 id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
110 }
111 ChangeRepresentation(new_rep);
112 AddDependantsToWorklist(h_infer);
113 }
114}
115
116
117void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
118 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
119 h_infer->AddToWorklist(it.value());
120 }
121 for (int i = 0; i < OperandCount(); ++i) {
122 h_infer->AddToWorklist(OperandAt(i));
123 }
124}
125
126
127static int32_t ConvertAndSetOverflow(Representation r,
128 int64_t result,
129 bool* overflow) {
130 if (r.IsSmi()) {
131 if (result > Smi::kMaxValue) {
132 *overflow = true;
133 return Smi::kMaxValue;
134 }
135 if (result < Smi::kMinValue) {
136 *overflow = true;
137 return Smi::kMinValue;
138 }
139 } else {
140 if (result > kMaxInt) {
141 *overflow = true;
142 return kMaxInt;
143 }
144 if (result < kMinInt) {
145 *overflow = true;
146 return kMinInt;
147 }
148 }
149 return static_cast<int32_t>(result);
150}
151
152
153static int32_t AddWithoutOverflow(Representation r,
154 int32_t a,
155 int32_t b,
156 bool* overflow) {
157 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
158 return ConvertAndSetOverflow(r, result, overflow);
159}
160
161
162static int32_t SubWithoutOverflow(Representation r,
163 int32_t a,
164 int32_t b,
165 bool* overflow) {
166 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
167 return ConvertAndSetOverflow(r, result, overflow);
168}
169
170
171static int32_t MulWithoutOverflow(const Representation& r,
172 int32_t a,
173 int32_t b,
174 bool* overflow) {
175 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
176 return ConvertAndSetOverflow(r, result, overflow);
177}
178
179
180int32_t Range::Mask() const {
181 if (lower_ == upper_) return lower_;
182 if (lower_ >= 0) {
183 int32_t res = 1;
184 while (res < upper_) {
185 res = (res << 1) | 1;
186 }
187 return res;
188 }
189 return 0xffffffff;
190}
191
192
193void Range::AddConstant(int32_t value) {
194 if (value == 0) return;
195 bool may_overflow = false; // Overflow is ignored here.
196 Representation r = Representation::Integer32();
197 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
198 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
199#ifdef DEBUG
200 Verify();
201#endif
202}
203
204
205void Range::Intersect(Range* other) {
206 upper_ = Min(upper_, other->upper_);
207 lower_ = Max(lower_, other->lower_);
208 bool b = CanBeMinusZero() && other->CanBeMinusZero();
209 set_can_be_minus_zero(b);
210}
211
212
213void Range::Union(Range* other) {
214 upper_ = Max(upper_, other->upper_);
215 lower_ = Min(lower_, other->lower_);
216 bool b = CanBeMinusZero() || other->CanBeMinusZero();
217 set_can_be_minus_zero(b);
218}
219
220
221void Range::CombinedMax(Range* other) {
222 upper_ = Max(upper_, other->upper_);
223 lower_ = Max(lower_, other->lower_);
224 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
225}
226
227
228void Range::CombinedMin(Range* other) {
229 upper_ = Min(upper_, other->upper_);
230 lower_ = Min(lower_, other->lower_);
231 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
232}
233
234
235void Range::Sar(int32_t value) {
236 int32_t bits = value & 0x1F;
237 lower_ = lower_ >> bits;
238 upper_ = upper_ >> bits;
239 set_can_be_minus_zero(false);
240}
241
242
243void Range::Shl(int32_t value) {
244 int32_t bits = value & 0x1F;
245 int old_lower = lower_;
246 int old_upper = upper_;
247 lower_ = lower_ << bits;
248 upper_ = upper_ << bits;
249 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
250 upper_ = kMaxInt;
251 lower_ = kMinInt;
252 }
253 set_can_be_minus_zero(false);
254}
255
256
257bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
258 bool may_overflow = false;
259 lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
260 upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
261 KeepOrder();
262#ifdef DEBUG
263 Verify();
264#endif
265 return may_overflow;
266}
267
268
269bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
270 bool may_overflow = false;
271 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
272 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
273 KeepOrder();
274#ifdef DEBUG
275 Verify();
276#endif
277 return may_overflow;
278}
279
280
281void Range::KeepOrder() {
282 if (lower_ > upper_) {
283 int32_t tmp = lower_;
284 lower_ = upper_;
285 upper_ = tmp;
286 }
287}
288
289
#ifdef DEBUG
// Checks the Range invariant: the bounds are ordered.
void Range::Verify() const {
  DCHECK(lower_ <= upper_);
}
#endif
295
296
297bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
298 bool may_overflow = false;
299 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
300 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
301 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
302 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
303 lower_ = Min(Min(v1, v2), Min(v3, v4));
304 upper_ = Max(Max(v1, v2), Max(v3, v4));
305#ifdef DEBUG
306 Verify();
307#endif
308 return may_overflow;
309}
310
311
312bool HValue::IsDefinedAfter(HBasicBlock* other) const {
313 return block()->block_id() > other->block_id();
314}
315
316
317HUseListNode* HUseListNode::tail() {
318 // Skip and remove dead items in the use list.
319 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
320 tail_ = tail_->tail_;
321 }
322 return tail_;
323}
324
325
326bool HValue::CheckUsesForFlag(Flag f) const {
327 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
328 if (it.value()->IsSimulate()) continue;
329 if (!it.value()->CheckFlag(f)) return false;
330 }
331 return true;
332}
333
334
335bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
336 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
337 if (it.value()->IsSimulate()) continue;
338 if (!it.value()->CheckFlag(f)) {
339 *value = it.value();
340 return false;
341 }
342 }
343 return true;
344}
345
346
347bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
348 bool return_value = false;
349 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
350 if (it.value()->IsSimulate()) continue;
351 if (!it.value()->CheckFlag(f)) return false;
352 return_value = true;
353 }
354 return return_value;
355}
356
357
// Positions the iterator on the first live use; Advance() relies on
// HUseListNode::tail() to skip dead entries.
HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
  Advance();
}
361
362
363void HUseIterator::Advance() {
364 current_ = next_;
365 if (current_ != NULL) {
366 next_ = current_->tail();
367 value_ = current_->value();
368 index_ = current_->index();
369 }
370}
371
372
373int HValue::UseCount() const {
374 int count = 0;
375 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
376 return count;
377}
378
379
// Removes the use of |value| at operand |index| from this value's use list
// and returns the removed node, or NULL when no such use exists. The
// returned node may be recycled by the caller (see RegisterUse).
HUseListNode* HValue::RemoveUse(HValue* value, int index) {
  HUseListNode* previous = NULL;
  HUseListNode* current = use_list_;
  while (current != NULL) {
    if (current->value() == value && current->index() == index) {
      if (previous == NULL) {
        // Match at the head of the list: pop it.
        use_list_ = current->tail();
      } else {
        // Match in the middle: unlink it.
        previous->set_tail(current->tail());
      }
      break;
    }

    previous = current;
    current = current->tail();
  }

#ifdef DEBUG
  // Do not reuse use list nodes in debug mode, zap them.
  if (current != NULL) {
    HUseListNode* temp =
        new(block()->zone())
        HUseListNode(current->value(), current->index(), NULL);
    current->Zap();
    current = temp;
  }
#endif
  return current;
}
409
410
411bool HValue::Equals(HValue* other) {
412 if (other->opcode() != opcode()) return false;
413 if (!other->representation().Equals(representation())) return false;
414 if (!other->type_.Equals(type_)) return false;
415 if (other->flags() != flags()) return false;
416 if (OperandCount() != other->OperandCount()) return false;
417 for (int i = 0; i < OperandCount(); ++i) {
418 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
419 }
420 bool result = DataEquals(other);
421 DCHECK(!result || Hashcode() == other->Hashcode());
422 return result;
423}
424
425
426intptr_t HValue::Hashcode() {
427 intptr_t result = opcode();
428 int count = OperandCount();
429 for (int i = 0; i < count; ++i) {
430 result = result * 19 + OperandAt(i)->id() + (result >> 7);
431 }
432 return result;
433}
434
435
// Returns a human-readable opcode name. Cases are generated from the
// concrete instruction list; Phi is not in that list and is handled
// explicitly.
const char* HValue::Mnemonic() const {
  switch (opcode()) {
#define MAKE_CASE(type) case k##type: return #type;
    HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
#undef MAKE_CASE
    case kPhi: return "Phi";
    default: return "";
  }
}
445
446
447bool HValue::CanReplaceWithDummyUses() {
448 return FLAG_unreachable_code_elimination &&
449 !(block()->IsReachable() ||
450 IsBlockEntry() ||
451 IsControlInstruction() ||
452 IsArgumentsObject() ||
453 IsCapturedObject() ||
454 IsSimulate() ||
455 IsEnterInlined() ||
456 IsLeaveInlined());
457}
458
459
460bool HValue::IsInteger32Constant() {
461 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
462}
463
464
465int32_t HValue::GetInteger32Constant() {
466 return HConstant::cast(this)->Integer32Value();
467}
468
469
470bool HValue::EqualsInteger32Constant(int32_t value) {
471 return IsInteger32Constant() && GetInteger32Constant() == value;
472}
473
474
// Sets operand |index| to |value| while keeping use lists consistent.
// RegisterUse must run first: it reads the old operand via OperandAt(index)
// before the slot is overwritten.
void HValue::SetOperandAt(int index, HValue* value) {
  RegisterUse(index, value);
  InternalSetOperandAt(index, value);
}
479
480
// Redirects all uses to |other| (when non-NULL), then kills this value and
// removes it from the graph.
void HValue::DeleteAndReplaceWith(HValue* other) {
  // We replace all uses first, so Delete can assert that there are none.
  if (other != NULL) ReplaceAllUsesWith(other);
  Kill();
  DeleteFromGraph();
}
487
488
// Moves every use of this value over to |other|: each using instruction's
// operand is rewritten, and the use-list node itself is spliced onto the
// front of |other|'s use list (no allocation).
void HValue::ReplaceAllUsesWith(HValue* other) {
  while (use_list_ != NULL) {
    HUseListNode* list_node = use_list_;
    HValue* value = list_node->value();
    DCHECK(!value->block()->IsStartBlock());
    value->InternalSetOperandAt(list_node->index(), other);
    use_list_ = list_node->tail();
    list_node->set_tail(other->use_list_);
    other->use_list_ = list_node;
  }
}
500
501
// Marks this value dead. Use-list nodes that point at dead values are
// pruned lazily by HUseListNode::tail(); here only an already-dead head
// node of each operand's use list is popped eagerly.
void HValue::Kill() {
  // Instead of going through the entire use list of each operand, we only
  // check the first item in each use list and rely on the tail() method to
  // skip dead items, removing them lazily next time we traverse the list.
  SetFlag(kIsDead);
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* operand = OperandAt(i);
    if (operand == NULL) continue;
    HUseListNode* first = operand->use_list_;
    if (first != NULL && first->value()->CheckFlag(kIsDead)) {
      operand->use_list_ = first->tail();
    }
  }
}
516
517
518void HValue::SetBlock(HBasicBlock* block) {
519 DCHECK(block_ == NULL || block == NULL);
520 block_ = block;
521 if (id_ == kNoNumber && block != NULL) {
522 id_ = block->graph()->GetNextValueID(this);
523 }
524}
525
526
// Stream output for HValue delegates to the virtual PrintTo.
std::ostream& operator<<(std::ostream& os, const HValue& v) {
  return v.PrintTo(os);
}
530
531
532std::ostream& operator<<(std::ostream& os, const TypeOf& t) {
533 if (t.value->representation().IsTagged() &&
534 !t.value->type().Equals(HType::Tagged()))
535 return os;
536 return os << " type:" << t.value->type();
537}
538
539
// Prints an instruction's GVN "changes" flags: nothing when empty,
// " changes[*]" when all side effects are set, otherwise a comma-separated
// list of the individual flag names.
std::ostream& operator<<(std::ostream& os, const ChangesOf& c) {
  GVNFlagSet changes_flags = c.value->ChangesFlags();
  if (changes_flags.IsEmpty()) return os;
  os << " changes[";
  if (changes_flags == c.value->AllSideEffectsFlagSet()) {
    os << "*";
  } else {
    bool add_comma = false;
#define PRINT_DO(Type)                      \
  if (changes_flags.Contains(k##Type)) {    \
    if (add_comma) os << ",";               \
    add_comma = true;                       \
    os << #Type;                            \
  }
    GVN_TRACKED_FLAG_LIST(PRINT_DO);
    GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
  }
  return os << "]";
}
560
561
// True when a single JSObject map is known for this value.
bool HValue::HasMonomorphicJSObjectType() {
  return !GetMonomorphicJSObjectMap().is_null();
}
565
566
567bool HValue::UpdateInferredType() {
568 HType type = CalculateInferredType();
569 bool result = (!type.Equals(type_));
570 type_ = type;
571 return result;
572}
573
574
// Updates use lists when operand |index| changes to |new_value|: removes
// this instruction's use of the old operand and registers it with the new
// one, recycling the removed list node when possible to avoid a zone
// allocation.
void HValue::RegisterUse(int index, HValue* new_value) {
  HValue* old_value = OperandAt(index);
  if (old_value == new_value) return;

  HUseListNode* removed = NULL;
  if (old_value != NULL) {
    removed = old_value->RemoveUse(this, index);
  }

  if (new_value != NULL) {
    if (removed == NULL) {
      // No node to recycle: allocate a fresh one at the list head.
      new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
          this, index, new_value->use_list_);
    } else {
      // Recycle the node removed from the old operand's list.
      removed->set_tail(new_value->use_list_);
      new_value->use_list_ = removed;
    }
  }
}
594
595
// Pushes |r| on top of this value's range stack; ranges are stacked so a
// later RemoveLastAddedRange() can pop back to the previous one.
void HValue::AddNewRange(Range* r, Zone* zone) {
  if (!HasRange()) ComputeInitialRange(zone);
  if (!HasRange()) range_ = new(zone) Range();
  DCHECK(HasRange());
  r->StackUpon(range_);
  range_ = r;
}
603
604
// Pops the most recently pushed range (see AddNewRange).
void HValue::RemoveLastAddedRange() {
  DCHECK(HasRange());
  DCHECK(range_->next() != NULL);
  range_ = range_->next();
}
610
611
// Seeds the range stack with the instruction-specific inferred range.
void HValue::ComputeInitialRange(Zone* zone) {
  DCHECK(!HasRange());
  range_ = InferRange(zone);
  DCHECK(HasRange());
}
617
618
619std::ostream& HInstruction::PrintTo(std::ostream& os) const { // NOLINT
620 os << Mnemonic() << " ";
621 PrintDataTo(os) << ChangesOf(this) << TypeOf(this);
622 if (CheckFlag(HValue::kHasNoObservableSideEffects)) os << " [noOSE]";
623 if (CheckFlag(HValue::kIsDead)) os << " [dead]";
624 return os;
625}
626
627
628std::ostream& HInstruction::PrintDataTo(std::ostream& os) const { // NOLINT
629 for (int i = 0; i < OperandCount(); ++i) {
630 if (i > 0) os << " ";
631 os << NameOf(OperandAt(i));
632 }
633 return os;
634}
635
636
// Removes this instruction from its block's doubly-linked instruction
// list, updating the block's last pointer when the tail is removed.
void HInstruction::Unlink() {
  DCHECK(IsLinked());
  DCHECK(!IsControlInstruction());  // Must never move control instructions.
  DCHECK(!IsBlockEntry());  // Doesn't make sense to delete these.
  DCHECK(previous_ != NULL);
  previous_->next_ = next_;
  if (next_ == NULL) {
    // Removing the last instruction: the block tail moves back.
    DCHECK(block()->last() == this);
    block()->set_last(previous_);
  } else {
    next_->previous_ = previous_;
  }
  clear_block();
}
651
652
// Links this (currently unlinked) instruction directly before |next|,
// joining |next|'s block and inheriting its source position when this
// instruction has none of its own.
void HInstruction::InsertBefore(HInstruction* next) {
  DCHECK(!IsLinked());
  DCHECK(!next->IsBlockEntry());
  DCHECK(!IsControlInstruction());
  DCHECK(!next->block()->IsStartBlock());
  DCHECK(next->previous_ != NULL);
  HInstruction* prev = next->previous();
  prev->next_ = this;
  next->previous_ = this;
  next_ = next;
  previous_ = prev;
  SetBlock(next->block());
  if (!has_position() && next->has_position()) {
    set_position(next->position());
  }
}
669
670
// Links this (currently unlinked) instruction directly after |previous|,
// with two adjustments: non-constants aimed at a finished start block are
// redirected to its successor, and insertion never separates a
// side-effecting instruction from the simulate that records its effects.
void HInstruction::InsertAfter(HInstruction* previous) {
  DCHECK(!IsLinked());
  DCHECK(!previous->IsControlInstruction());
  DCHECK(!IsControlInstruction() || previous->next_ == NULL);
  HBasicBlock* block = previous->block();
  // Never insert anything except constants into the start block after finishing
  // it.
  if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
    DCHECK(block->end()->SecondSuccessor() == NULL);
    InsertAfter(block->end()->FirstSuccessor()->first());
    return;
  }

  // If we're inserting after an instruction with side-effects that is
  // followed by a simulate instruction, we need to insert after the
  // simulate instruction instead.
  HInstruction* next = previous->next_;
  if (previous->HasObservableSideEffects() && next != NULL) {
    DCHECK(next->IsSimulate());
    previous = next;
    next = previous->next_;
  }

  previous_ = previous;
  next_ = next;
  SetBlock(block);
  previous->next_ = this;
  if (next != NULL) next->previous_ = this;
  if (block->last() == previous) {
    block->set_last(this);
  }
  // Inherit a source position if we don't have one of our own.
  if (!has_position() && previous->has_position()) {
    set_position(previous->position());
  }
}
706
707
708bool HInstruction::Dominates(HInstruction* other) {
709 if (block() != other->block()) {
710 return block()->Dominates(other->block());
711 }
712 // Both instructions are in the same basic block. This instruction
713 // should precede the other one in order to dominate it.
714 for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
715 if (instr == other) {
716 return true;
717 }
718 }
719 return false;
720}
721
722
#ifdef DEBUG
// Debug-only consistency check: operands are defined before use, side
// effects are followed by a simulate, GVN-able instructions override
// DataEquals, and all uses are still linked into the graph.
void HInstruction::Verify() {
  // Verify that input operands are defined before use.
  HBasicBlock* cur_block = block();
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* other_operand = OperandAt(i);
    if (other_operand == NULL) continue;
    HBasicBlock* other_block = other_operand->block();
    if (cur_block == other_block) {
      if (!other_operand->IsPhi()) {
        // Walk backwards from this instruction to find the operand.
        HInstruction* cur = this->previous();
        while (cur != NULL) {
          if (cur == other_operand) break;
          cur = cur->previous();
        }
        // Must reach other operand in the same block!
        DCHECK(cur == other_operand);
      }
    } else {
      // If the following assert fires, you may have forgotten an
      // AddInstruction.
      DCHECK(other_block->Dominates(cur_block));
    }
  }

  // Verify that instructions that may have side-effects are followed
  // by a simulate instruction.
  if (HasObservableSideEffects() && !IsOsrEntry()) {
    DCHECK(next()->IsSimulate());
  }

  // Verify that instructions that can be eliminated by GVN have overridden
  // HValue::DataEquals. The default implementation is UNREACHABLE. We
  // don't actually care whether DataEquals returns true or false here.
  if (CheckFlag(kUseGVN)) DataEquals(this);

  // Verify that all uses are in the graph.
  for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
    if (use.value()->IsInstruction()) {
      DCHECK(HInstruction::cast(use.value())->IsLinked());
    }
  }
}
#endif
767
768
// Conservatively classifies instructions by whether executing them can
// trigger a deoptimization. Kept as an exhaustive switch with no default
// so that adding a new instruction forces an explicit decision here.
bool HInstruction::CanDeoptimize() {
  switch (opcode()) {
    // Instructions that never deoptimize.
    case HValue::kAbnormalExit:
    case HValue::kAccessArgumentsAt:
    case HValue::kAllocate:
    case HValue::kArgumentsElements:
    case HValue::kArgumentsLength:
    case HValue::kArgumentsObject:
    case HValue::kBlockEntry:
    case HValue::kCallNewArray:
    case HValue::kCapturedObject:
    case HValue::kClassOfTestAndBranch:
    case HValue::kCompareGeneric:
    case HValue::kCompareHoleAndBranch:
    case HValue::kCompareMap:
    case HValue::kCompareNumericAndBranch:
    case HValue::kCompareObjectEqAndBranch:
    case HValue::kConstant:
    case HValue::kConstructDouble:
    case HValue::kContext:
    case HValue::kDebugBreak:
    case HValue::kDeclareGlobals:
    case HValue::kDoubleBits:
    case HValue::kDummyUse:
    case HValue::kEnterInlined:
    case HValue::kEnvironmentMarker:
    case HValue::kForceRepresentation:
    case HValue::kGetCachedArrayIndex:
    case HValue::kGoto:
    case HValue::kHasCachedArrayIndexAndBranch:
    case HValue::kHasInstanceTypeAndBranch:
    case HValue::kInnerAllocatedObject:
    case HValue::kIsSmiAndBranch:
    case HValue::kIsStringAndBranch:
    case HValue::kIsUndetectableAndBranch:
    case HValue::kLeaveInlined:
    case HValue::kLoadFieldByIndex:
    case HValue::kLoadGlobalGeneric:
    case HValue::kLoadNamedField:
    case HValue::kLoadNamedGeneric:
    case HValue::kLoadRoot:
    case HValue::kMathMinMax:
    case HValue::kParameter:
    case HValue::kPhi:
    case HValue::kPushArguments:
    case HValue::kReturn:
    case HValue::kSeqStringGetChar:
    case HValue::kStoreCodeEntry:
    case HValue::kStoreKeyed:
    case HValue::kStoreNamedField:
    case HValue::kStoreNamedGeneric:
    case HValue::kStringCharCodeAt:
    case HValue::kStringCharFromCode:
    case HValue::kThisFunction:
    case HValue::kTypeofIsAndBranch:
    case HValue::kUnknownOSRValue:
    case HValue::kUseConst:
      return false;

    // Instructions that may deoptimize.
    case HValue::kAdd:
    case HValue::kApplyArguments:
    case HValue::kBitwise:
    case HValue::kBoundsCheck:
    case HValue::kBranch:
    case HValue::kCallRuntime:
    case HValue::kCallWithDescriptor:
    case HValue::kChange:
    case HValue::kCheckArrayBufferNotNeutered:
    case HValue::kCheckHeapObject:
    case HValue::kCheckInstanceType:
    case HValue::kCheckMapValue:
    case HValue::kCheckMaps:
    case HValue::kCheckSmi:
    case HValue::kCheckValue:
    case HValue::kClampToUint8:
    case HValue::kDeoptimize:
    case HValue::kDiv:
    case HValue::kForInCacheArray:
    case HValue::kForInPrepareMap:
    case HValue::kHasInPrototypeChainAndBranch:
    case HValue::kInvokeFunction:
    case HValue::kLoadContextSlot:
    case HValue::kLoadFunctionPrototype:
    case HValue::kLoadKeyed:
    case HValue::kLoadKeyedGeneric:
    case HValue::kMathFloorOfDiv:
    case HValue::kMaybeGrowElements:
    case HValue::kMod:
    case HValue::kMul:
    case HValue::kOsrEntry:
    case HValue::kPower:
    case HValue::kPrologue:
    case HValue::kRor:
    case HValue::kSar:
    case HValue::kSeqStringSetChar:
    case HValue::kShl:
    case HValue::kShr:
    case HValue::kSimulate:
    case HValue::kStackCheck:
    case HValue::kStoreContextSlot:
    case HValue::kStoreKeyedGeneric:
    case HValue::kStringAdd:
    case HValue::kStringCompareAndBranch:
    case HValue::kSub:
    case HValue::kTransitionElementsKind:
    case HValue::kTrapAllocationMemento:
    case HValue::kTypeof:
    case HValue::kUnaryMathOperation:
    case HValue::kWrapReceiver:
      return true;
  }
  UNREACHABLE();
  return true;
}
883
884
// Prints a value as "<representation-mnemonic><id>".
std::ostream& operator<<(std::ostream& os, const NameOf& v) {
  return os << v.value->representation().Mnemonic() << v.value->id();
}
888
// A dummy use prints only the value it keeps alive.
std::ostream& HDummyUse::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}
892
893
894std::ostream& HEnvironmentMarker::PrintDataTo(
895 std::ostream& os) const { // NOLINT
896 return os << (kind() == BIND ? "bind" : "lookup") << " var[" << index()
897 << "]";
898}
899
900
901std::ostream& HUnaryCall::PrintDataTo(std::ostream& os) const { // NOLINT
902 return os << NameOf(value()) << " #" << argument_count();
903}
904
905
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000906std::ostream& HBinaryCall::PrintDataTo(std::ostream& os) const { // NOLINT
907 return os << NameOf(first()) << " " << NameOf(second()) << " #"
908 << argument_count();
909}
910
Ben Murdochda12d292016-06-02 14:46:10 +0100911std::ostream& HInvokeFunction::PrintTo(std::ostream& os) const { // NOLINT
912 if (tail_call_mode() == TailCallMode::kAllow) os << "Tail";
913 return HBinaryCall::PrintTo(os);
914}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000915
Ben Murdochda12d292016-06-02 14:46:10 +0100916std::ostream& HInvokeFunction::PrintDataTo(std::ostream& os) const { // NOLINT
917 HBinaryCall::PrintDataTo(os);
918 if (syntactic_tail_call_mode() == TailCallMode::kAllow) {
919 os << ", JSTailCall";
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000920 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000921 return os;
922}
923
// Prints index and length, the decomposed index expression when one is
// present, and a marker when the check has been statically eliminated.
std::ostream& HBoundsCheck::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(index()) << " " << NameOf(length());
  if (base() != NULL && (offset() != 0 || scale() != 0)) {
    os << " base: ((";
    if (base() != index()) {
      os << NameOf(index());
    } else {
      os << "index";
    }
    os << " + " << offset() << ") >> " << scale() << ")";
  }
  if (skip_check()) os << " [DISABLED]";
  return os;
}
938
939
940void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
941 DCHECK(CheckFlag(kFlexibleRepresentation));
942 HValue* actual_index = index()->ActualValue();
943 HValue* actual_length = length()->ActualValue();
944 Representation index_rep = actual_index->representation();
945 Representation length_rep = actual_length->representation();
946 if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
947 index_rep = Representation::Smi();
948 }
949 if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
950 length_rep = Representation::Smi();
951 }
952 Representation r = index_rep.generalize(length_rep);
953 if (r.is_more_general_than(Representation::Integer32())) {
954 r = Representation::Integer32();
955 }
956 UpdateRepresentation(r, h_infer, "boundscheck");
957}
958
959
960Range* HBoundsCheck::InferRange(Zone* zone) {
961 Representation r = representation();
962 if (r.IsSmiOrInteger32() && length()->HasRange()) {
963 int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
964 int lower = 0;
965
966 Range* result = new(zone) Range(lower, upper);
967 if (index()->HasRange()) {
968 result->Intersect(index()->range());
969 }
970
971 // In case of Smi representation, clamp result to Smi::kMaxValue.
972 if (r.IsSmi()) result->ClampToSmi();
973 return result;
974 }
975 return HValue::InferRange(zone);
976}
977
978
// Prints all operands, the argument count, and a tail-call marker.
std::ostream& HCallWithDescriptor::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  for (int i = 0; i < OperandCount(); i++) {
    os << NameOf(OperandAt(i)) << " ";
  }
  os << "#" << argument_count();
  if (syntactic_tail_call_mode() == TailCallMode::kAllow) {
    os << ", JSTailCall";
  }
  return os;
}
990
991
992std::ostream& HCallNewArray::PrintDataTo(std::ostream& os) const { // NOLINT
993 os << ElementsKindToString(elements_kind()) << " ";
994 return HBinaryCall::PrintDataTo(os);
995}
996
997
998std::ostream& HCallRuntime::PrintDataTo(std::ostream& os) const { // NOLINT
999 os << function()->name << " ";
1000 if (save_doubles() == kSaveFPRegs) os << "[save doubles] ";
1001 return os << "#" << argument_count();
1002}
1003
1004
1005std::ostream& HClassOfTestAndBranch::PrintDataTo(
1006 std::ostream& os) const { // NOLINT
1007 return os << "class_of_test(" << NameOf(value()) << ", \""
1008 << class_name()->ToCString().get() << "\")";
1009}
1010
1011
1012std::ostream& HWrapReceiver::PrintDataTo(std::ostream& os) const { // NOLINT
1013 return os << NameOf(receiver()) << " " << NameOf(function());
1014}
1015
1016
1017std::ostream& HAccessArgumentsAt::PrintDataTo(
1018 std::ostream& os) const { // NOLINT
1019 return os << NameOf(arguments()) << "[" << NameOf(index()) << "], length "
1020 << NameOf(length());
1021}
1022
1023
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001024std::ostream& HControlInstruction::PrintDataTo(
1025 std::ostream& os) const { // NOLINT
1026 os << " goto (";
1027 bool first_block = true;
1028 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
1029 if (!first_block) os << ", ";
1030 os << *it.Current();
1031 first_block = false;
1032 }
1033 return os << ")";
1034}
1035
1036
1037std::ostream& HUnaryControlInstruction::PrintDataTo(
1038 std::ostream& os) const { // NOLINT
1039 os << NameOf(value());
1040 return HControlInstruction::PrintDataTo(os);
1041}
1042
1043
1044std::ostream& HReturn::PrintDataTo(std::ostream& os) const { // NOLINT
1045 return os << NameOf(value()) << " (pop " << NameOf(parameter_count())
1046 << " values)";
1047}
1048
1049
// Derives the observed input representation for the branch condition from
// ToBoolean type feedback. The checks go from the least specific feedback
// (tagged-only types) to the most specific (Smi).
Representation HBranch::observed_input_representation(int index) {
  // These types can only be checked on tagged values.
  if (expected_input_types_.Contains(ToBooleanICStub::NULL_TYPE) ||
      expected_input_types_.Contains(ToBooleanICStub::SPEC_OBJECT) ||
      expected_input_types_.Contains(ToBooleanICStub::STRING) ||
      expected_input_types_.Contains(ToBooleanICStub::SYMBOL) ||
      expected_input_types_.Contains(ToBooleanICStub::SIMD_VALUE)) {
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanICStub::UNDEFINED)) {
    // Undefined plus heap numbers can still be handled as doubles.
    if (expected_input_types_.Contains(ToBooleanICStub::HEAP_NUMBER)) {
      return Representation::Double();
    }
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanICStub::HEAP_NUMBER)) {
    return Representation::Double();
  }
  if (expected_input_types_.Contains(ToBooleanICStub::SMI)) {
    return Representation::Smi();
  }
  return Representation::None();
}
1072
1073
1074bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1075 HValue* value = this->value();
1076 if (value->EmitAtUses()) {
1077 DCHECK(value->IsConstant());
1078 DCHECK(!value->representation().IsDouble());
1079 *block = HConstant::cast(value)->BooleanValue()
1080 ? FirstSuccessor()
1081 : SecondSuccessor();
1082 return true;
1083 }
1084 *block = NULL;
1085 return false;
1086}
1087
1088
1089std::ostream& HBranch::PrintDataTo(std::ostream& os) const { // NOLINT
1090 return HUnaryControlInstruction::PrintDataTo(os) << " "
1091 << expected_input_types();
1092}
1093
1094
1095std::ostream& HCompareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1096 os << NameOf(value()) << " (" << *map().handle() << ")";
1097 HControlInstruction::PrintDataTo(os);
1098 if (known_successor_index() == 0) {
1099 os << " [true]";
1100 } else if (known_successor_index() == 1) {
1101 os << " [false]";
1102 }
1103 return os;
1104}
1105
1106
1107const char* HUnaryMathOperation::OpName() const {
1108 switch (op()) {
1109 case kMathFloor:
1110 return "floor";
1111 case kMathFround:
1112 return "fround";
1113 case kMathRound:
1114 return "round";
1115 case kMathAbs:
1116 return "abs";
1117 case kMathLog:
1118 return "log";
1119 case kMathExp:
1120 return "exp";
1121 case kMathSqrt:
1122 return "sqrt";
1123 case kMathPowHalf:
1124 return "pow-half";
1125 case kMathClz32:
1126 return "clz32";
1127 default:
1128 UNREACHABLE();
1129 return NULL;
1130 }
1131}
1132
1133
1134Range* HUnaryMathOperation::InferRange(Zone* zone) {
1135 Representation r = representation();
1136 if (op() == kMathClz32) return new(zone) Range(0, 32);
1137 if (r.IsSmiOrInteger32() && value()->HasRange()) {
1138 if (op() == kMathAbs) {
1139 int upper = value()->range()->upper();
1140 int lower = value()->range()->lower();
1141 bool spans_zero = value()->range()->CanBeZero();
1142 // Math.abs(kMinInt) overflows its representation, on which the
1143 // instruction deopts. Hence clamp it to kMaxInt.
1144 int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
1145 int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
1146 Range* result =
1147 new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
1148 Max(abs_lower, abs_upper));
1149 // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
1150 // Smi::kMaxValue.
1151 if (r.IsSmi()) result->ClampToSmi();
1152 return result;
1153 }
1154 }
1155 return HValue::InferRange(zone);
1156}
1157
1158
1159std::ostream& HUnaryMathOperation::PrintDataTo(
1160 std::ostream& os) const { // NOLINT
1161 return os << OpName() << " " << NameOf(value());
1162}
1163
1164
1165std::ostream& HUnaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
1166 return os << NameOf(value());
1167}
1168
1169
1170std::ostream& HHasInstanceTypeAndBranch::PrintDataTo(
1171 std::ostream& os) const { // NOLINT
1172 os << NameOf(value());
1173 switch (from_) {
1174 case FIRST_JS_RECEIVER_TYPE:
1175 if (to_ == LAST_TYPE) os << " spec_object";
1176 break;
1177 case JS_REGEXP_TYPE:
1178 if (to_ == JS_REGEXP_TYPE) os << " reg_exp";
1179 break;
1180 case JS_ARRAY_TYPE:
1181 if (to_ == JS_ARRAY_TYPE) os << " array";
1182 break;
1183 case JS_FUNCTION_TYPE:
1184 if (to_ == JS_FUNCTION_TYPE) os << " function";
1185 break;
1186 default:
1187 break;
1188 }
1189 return os;
1190}
1191
1192
1193std::ostream& HTypeofIsAndBranch::PrintDataTo(
1194 std::ostream& os) const { // NOLINT
1195 os << NameOf(value()) << " == " << type_literal()->ToCString().get();
1196 return HControlInstruction::PrintDataTo(os);
1197}
1198
1199
namespace {

// Computes the heap string that "typeof constant" evaluates to.  Only used
// for constant folding of typeof comparisons (see KnownSuccessorBlock below).
String* TypeOfString(HConstant* constant, Isolate* isolate) {
  Heap* heap = isolate->heap();
  if (constant->HasNumberValue()) return heap->number_string();
  if (constant->HasStringValue()) return heap->string_string();
  switch (constant->GetInstanceType()) {
    case ODDBALL_TYPE: {
      Unique<Object> unique = constant->GetUnique();
      if (unique.IsKnownGlobal(heap->true_value()) ||
          unique.IsKnownGlobal(heap->false_value())) {
        return heap->boolean_string();
      }
      // Per the spec, typeof null is "object", not "null".
      if (unique.IsKnownGlobal(heap->null_value())) {
        return heap->object_string();
      }
      DCHECK(unique.IsKnownGlobal(heap->undefined_value()));
      return heap->undefined_string();
    }
    case SYMBOL_TYPE:
      return heap->symbol_string();
    case SIMD128_VALUE_TYPE: {
      // Each SIMD128 type has its own typeof string, keyed on the map.
      Unique<Map> map = constant->ObjectMap();
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  if (map.IsKnownGlobal(heap->type##_map())) {                \
    return heap->type##_string();                             \
  }
      SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
      UNREACHABLE();
      return nullptr;
    }
    default:
      // Undetectable objects (e.g. document.all) masquerade as undefined.
      if (constant->IsUndetectable()) return heap->undefined_string();
      if (constant->IsCallable()) return heap->function_string();
      return heap->object_string();
  }
}

}  // namespace
1240
1241
1242bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1243 if (FLAG_fold_constants && value()->IsConstant()) {
1244 HConstant* constant = HConstant::cast(value());
1245 String* type_string = TypeOfString(constant, isolate());
1246 bool same_type = type_literal_.IsKnownGlobal(type_string);
1247 *block = same_type ? FirstSuccessor() : SecondSuccessor();
1248 return true;
1249 } else if (value()->representation().IsSpecialization()) {
1250 bool number_type =
1251 type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
1252 *block = number_type ? FirstSuccessor() : SecondSuccessor();
1253 return true;
1254 }
1255 *block = NULL;
1256 return false;
1257}
1258
1259
1260std::ostream& HCheckMapValue::PrintDataTo(std::ostream& os) const { // NOLINT
1261 return os << NameOf(value()) << " " << NameOf(map());
1262}
1263
1264
1265HValue* HCheckMapValue::Canonicalize() {
1266 if (map()->IsConstant()) {
1267 HConstant* c_map = HConstant::cast(map());
1268 return HCheckMaps::CreateAndInsertAfter(
1269 block()->graph()->zone(), value(), c_map->MapValue(),
1270 c_map->HasStableMapValue(), this);
1271 }
1272 return this;
1273}
1274
1275
1276std::ostream& HForInPrepareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1277 return os << NameOf(enumerable());
1278}
1279
1280
1281std::ostream& HForInCacheArray::PrintDataTo(std::ostream& os) const { // NOLINT
1282 return os << NameOf(enumerable()) << " " << NameOf(map()) << "[" << idx_
1283 << "]";
1284}
1285
1286
1287std::ostream& HLoadFieldByIndex::PrintDataTo(
1288 std::ostream& os) const { // NOLINT
1289 return os << NameOf(object()) << " " << NameOf(index());
1290}
1291
1292
1293static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1294 if (!l->EqualsInteger32Constant(~0)) return false;
1295 *negated = r;
1296 return true;
1297}
1298
1299
1300static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1301 if (!instr->IsBitwise()) return false;
1302 HBitwise* b = HBitwise::cast(instr);
1303 return (b->op() == Token::BIT_XOR) &&
1304 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1305 MatchLeftIsOnes(b->right(), b->left(), negated));
1306}
1307
1308
1309static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1310 HValue* negated;
1311 return MatchNegationViaXor(instr, &negated) &&
1312 MatchNegationViaXor(negated, arg);
1313}
1314
1315
1316HValue* HBitwise::Canonicalize() {
1317 if (!representation().IsSmiOrInteger32()) return this;
1318 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
1319 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
1320 if (left()->EqualsInteger32Constant(nop_constant) &&
1321 !right()->CheckFlag(kUint32)) {
1322 return right();
1323 }
1324 if (right()->EqualsInteger32Constant(nop_constant) &&
1325 !left()->CheckFlag(kUint32)) {
1326 return left();
1327 }
1328 // Optimize double negation, a common pattern used for ToInt32(x).
1329 HValue* arg;
1330 if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
1331 return arg;
1332 }
1333 return this;
1334}
1335
1336
// static
HInstruction* HAdd::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right,
                        ExternalAddType external_add_type) {
  // This overload exists solely for external-pointer + tagged additions.
  // For everything else, you should use the other factory method without
  // ExternalAddType.
  DCHECK_EQ(external_add_type, AddOfExternalAndTagged);
  return new (zone) HAdd(context, left, right, external_add_type);
}
1346
1347
1348Representation HAdd::RepresentationFromInputs() {
1349 Representation left_rep = left()->representation();
1350 if (left_rep.IsExternal()) {
1351 return Representation::External();
1352 }
1353 return HArithmeticBinaryOperation::RepresentationFromInputs();
1354}
1355
1356
1357Representation HAdd::RequiredInputRepresentation(int index) {
1358 if (index == 2) {
1359 Representation left_rep = left()->representation();
1360 if (left_rep.IsExternal()) {
1361 if (external_add_type_ == AddOfExternalAndTagged) {
1362 return Representation::Tagged();
1363 } else {
1364 return Representation::Integer32();
1365 }
1366 }
1367 }
1368 return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
1369}
1370
1371
1372static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1373 return arg1->representation().IsSpecialization() &&
1374 arg2->EqualsInteger32Constant(identity);
1375}
1376
1377
1378HValue* HAdd::Canonicalize() {
1379 // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
1380 if (IsIdentityOperation(left(), right(), 0) &&
1381 !left()->representation().IsDouble()) { // Left could be -0.
1382 return left();
1383 }
1384 if (IsIdentityOperation(right(), left(), 0) &&
1385 !left()->representation().IsDouble()) { // Right could be -0.
1386 return right();
1387 }
1388 return this;
1389}
1390
1391
1392HValue* HSub::Canonicalize() {
1393 if (IsIdentityOperation(left(), right(), 0)) return left();
1394 return this;
1395}
1396
1397
1398HValue* HMul::Canonicalize() {
1399 if (IsIdentityOperation(left(), right(), 1)) return left();
1400 if (IsIdentityOperation(right(), left(), 1)) return right();
1401 return this;
1402}
1403
1404
1405bool HMul::MulMinusOne() {
1406 if (left()->EqualsInteger32Constant(-1) ||
1407 right()->EqualsInteger32Constant(-1)) {
1408 return true;
1409 }
1410
1411 return false;
1412}
1413
1414
HValue* HMod::Canonicalize() {
  // No algebraic simplification is performed for modulus.
  return this;
}
1418
1419
1420HValue* HDiv::Canonicalize() {
1421 if (IsIdentityOperation(left(), right(), 1)) return left();
1422 return this;
1423}
1424
1425
1426HValue* HChange::Canonicalize() {
1427 return (from().Equals(to())) ? value() : this;
1428}
1429
1430
1431HValue* HWrapReceiver::Canonicalize() {
1432 if (HasNoUses()) return NULL;
1433 if (receiver()->type().IsJSReceiver()) {
1434 return receiver();
1435 }
1436 return this;
1437}
1438
1439
1440std::ostream& HTypeof::PrintDataTo(std::ostream& os) const { // NOLINT
1441 return os << NameOf(value());
1442}
1443
1444
1445HInstruction* HForceRepresentation::New(Isolate* isolate, Zone* zone,
1446 HValue* context, HValue* value,
1447 Representation representation) {
1448 if (FLAG_fold_constants && value->IsConstant()) {
1449 HConstant* c = HConstant::cast(value);
1450 c = c->CopyToRepresentation(representation, zone);
1451 if (c != NULL) return c;
1452 }
1453 return new(zone) HForceRepresentation(value, representation);
1454}
1455
1456
1457std::ostream& HForceRepresentation::PrintDataTo(
1458 std::ostream& os) const { // NOLINT
1459 return os << representation().Mnemonic() << " " << NameOf(value());
1460}
1461
1462
std::ostream& HChange::PrintDataTo(std::ostream& os) const {  // NOLINT
  // Trace output: operand, source/target representations, and the
  // truncation/deopt flags that affect how the change is lowered.
  HUnaryOperation::PrintDataTo(os);
  os << " " << from().Mnemonic() << " to " << to().Mnemonic();

  if (CanTruncateToSmi()) os << " truncating-smi";
  if (CanTruncateToInt32()) os << " truncating-int32";
  if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
  if (CheckFlag(kAllowUndefinedAsNaN)) os << " allow-undefined-as-nan";
  return os;
}
1473
1474
HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    // Look through a representation change to the underlying value.
    if (val->IsChange()) val = HChange::cast(val)->value();
    if (val->representation().IsSmiOrInteger32()) {
      // Rounding an integer is a no-op; reuse the value directly, inserting
      // a representation change first if its representation differs.
      if (val->representation().Equals(representation())) return val;
      return Prepend(new(block()->zone()) HChange(
          val, representation(), false, false));
    }
  }
  // Fold Math.floor(a / b) into a single flooring-division instruction when
  // this is the division's only use and both operands can be expressed as
  // non-uint32 int32 values.
  if (op() == kMathFloor && representation().IsSmiOrInteger32() &&
      value()->IsDiv() && value()->HasOneUse()) {
    HDiv* hdiv = HDiv::cast(value());

    HValue* left = hdiv->left();
    if (left->representation().IsInteger32() && !left->CheckFlag(kUint32)) {
      // A value with an integer representation does not need to be transformed.
    } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32() &&
               !HChange::cast(left)->value()->CheckFlag(kUint32)) {
      // A change from an integer32 can be replaced by the integer32 value.
      left = HChange::cast(left)->value();
    } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
      // Feedback says the operand is integer-like: insert a conversion.
      left = Prepend(new(block()->zone()) HChange(
          left, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    HValue* right = hdiv->right();
    if (right->IsInteger32Constant()) {
      // Re-materialize the constant in int32 representation.
      right = Prepend(HConstant::cast(right)->CopyToRepresentation(
          Representation::Integer32(), right->block()->zone()));
    } else if (right->representation().IsInteger32() &&
               !right->CheckFlag(kUint32)) {
      // A value with an integer representation does not need to be transformed.
    } else if (right->IsChange() &&
               HChange::cast(right)->from().IsInteger32() &&
               !HChange::cast(right)->value()->CheckFlag(kUint32)) {
      // A change from an integer32 can be replaced by the integer32 value.
      right = HChange::cast(right)->value();
    } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
      // Feedback says the operand is integer-like: insert a conversion.
      right = Prepend(new(block()->zone()) HChange(
          right, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    return Prepend(HMathFloorOfDiv::New(
        block()->graph()->isolate(), block()->zone(), context(), left, right));
  }
  return this;
}
1527
1528
1529HValue* HCheckInstanceType::Canonicalize() {
1530 if ((check_ == IS_JS_RECEIVER && value()->type().IsJSReceiver()) ||
1531 (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
1532 (check_ == IS_STRING && value()->type().IsString())) {
1533 return value();
1534 }
1535
1536 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
1537 if (HConstant::cast(value())->HasInternalizedStringValue()) {
1538 return value();
1539 }
1540 }
1541 return this;
1542}
1543
1544
// Returns the inclusive [first, last] instance-type interval that the
// interval-style checks accept.  Only valid for interval checks; mask/tag
// checks use GetCheckMaskAndTag instead.
void HCheckInstanceType::GetCheckInterval(InstanceType* first,
                                          InstanceType* last) {
  DCHECK(is_interval_check());
  switch (check_) {
    case IS_JS_RECEIVER:
      *first = FIRST_JS_RECEIVER_TYPE;
      *last = LAST_JS_RECEIVER_TYPE;
      return;
    case IS_JS_ARRAY:
      // Single-type interval.
      *first = *last = JS_ARRAY_TYPE;
      return;
    case IS_JS_DATE:
      // Single-type interval.
      *first = *last = JS_DATE_TYPE;
      return;
    default:
      UNREACHABLE();
  }
}
1563
1564
// Returns the instance-type bit mask and expected tag for the mask/tag-style
// checks: the check passes when (instance_type & mask) == tag.
void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
  DCHECK(!is_interval_check());
  switch (check_) {
    case IS_STRING:
      *mask = kIsNotStringMask;
      *tag = kStringTag;
      return;
    case IS_INTERNALIZED_STRING:
      // Must be a string AND internalized.
      *mask = kIsNotStringMask | kIsNotInternalizedMask;
      *tag = kInternalizedTag;
      return;
    default:
      UNREACHABLE();
  }
}
1580
1581
1582std::ostream& HCheckMaps::PrintDataTo(std::ostream& os) const { // NOLINT
1583 os << NameOf(value()) << " [" << *maps()->at(0).handle();
1584 for (int i = 1; i < maps()->size(); ++i) {
1585 os << "," << *maps()->at(i).handle();
1586 }
1587 os << "]";
1588 if (IsStabilityCheck()) os << "(stability-check)";
1589 return os;
1590}
1591
1592
HValue* HCheckMaps::Canonicalize() {
  // If the operand is a constant whose (stable) map is in the checked set,
  // the runtime check can be downgraded to a stability check: narrow the
  // map set to the matching map and rely on map-stability invalidation.
  if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
    HConstant* c_value = HConstant::cast(value());
    if (c_value->HasObjectMap()) {
      for (int i = 0; i < maps()->size(); ++i) {
        if (c_value->ObjectMap() == maps()->at(i)) {
          if (maps()->size() > 1) {
            set_maps(new(block()->graph()->zone()) UniqueSet<Map>(
                    maps()->at(i), block()->graph()->zone()));
          }
          MarkAsStabilityCheck();
          break;
        }
      }
    }
  }
  return this;
}
1611
1612
1613std::ostream& HCheckValue::PrintDataTo(std::ostream& os) const { // NOLINT
1614 return os << NameOf(value()) << " " << Brief(*object().handle());
1615}
1616
1617
1618HValue* HCheckValue::Canonicalize() {
1619 return (value()->IsConstant() &&
1620 HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
1621}
1622
1623
1624const char* HCheckInstanceType::GetCheckName() const {
1625 switch (check_) {
1626 case IS_JS_RECEIVER: return "object";
1627 case IS_JS_ARRAY: return "array";
1628 case IS_JS_DATE:
1629 return "date";
1630 case IS_STRING: return "string";
1631 case IS_INTERNALIZED_STRING: return "internalized_string";
1632 }
1633 UNREACHABLE();
1634 return "";
1635}
1636
1637
1638std::ostream& HCheckInstanceType::PrintDataTo(
1639 std::ostream& os) const { // NOLINT
1640 os << GetCheckName() << " ";
1641 return HUnaryOperation::PrintDataTo(os);
1642}
1643
1644
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001645std::ostream& HUnknownOSRValue::PrintDataTo(std::ostream& os) const { // NOLINT
1646 const char* type = "expression";
1647 if (environment_->is_local_index(index_)) type = "local";
1648 if (environment_->is_special_index(index_)) type = "special";
1649 if (environment_->is_parameter_index(index_)) type = "parameter";
1650 return os << type << " @ " << index_;
1651}
1652
1653
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001654Range* HValue::InferRange(Zone* zone) {
1655 Range* result;
1656 if (representation().IsSmi() || type().IsSmi()) {
1657 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
1658 result->set_can_be_minus_zero(false);
1659 } else {
1660 result = new(zone) Range();
1661 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
1662 // TODO(jkummerow): The range cannot be minus zero when the upper type
1663 // bound is Integer32.
1664 }
1665 return result;
1666}
1667
1668
Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  // An int32 input that provably fits in Smi range produces a Smi: narrow
  // the static type and drop the new-space-promotion side effect.
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    set_type(HType::Smi());
    ClearChangesFlag(kNewSpacePromotion);
  }
  if (to().IsSmiOrTagged() &&
      input_range != NULL &&
      input_range->IsInSmiRange() &&
      (!SmiValuesAre32Bits() ||
       !value()->CheckFlag(HValue::kUint32) ||
       input_range->upper() != kMaxInt)) {
    // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
    // interval, so we treat kMaxInt as a sentinel for this entire interval.
    ClearFlag(kCanOverflow);
  }
  Range* result = (input_range != NULL)
      ? input_range->Copy(zone)
      : HValue::InferRange(zone);
  // -0 only survives into non-integer target representations, and only when
  // some use does not truncate.
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}
1698
1699
1700Range* HConstant::InferRange(Zone* zone) {
1701 if (HasInteger32Value()) {
1702 Range* result = new(zone) Range(int32_value_, int32_value_);
1703 result->set_can_be_minus_zero(false);
1704 return result;
1705 }
1706 return HValue::InferRange(zone);
1707}
1708
1709
1710SourcePosition HPhi::position() const { return block()->first()->position(); }
1711
1712
1713Range* HPhi::InferRange(Zone* zone) {
1714 Representation r = representation();
1715 if (r.IsSmiOrInteger32()) {
1716 if (block()->IsLoopHeader()) {
1717 Range* range = r.IsSmi()
1718 ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
1719 : new(zone) Range(kMinInt, kMaxInt);
1720 return range;
1721 } else {
1722 Range* range = OperandAt(0)->range()->Copy(zone);
1723 for (int i = 1; i < OperandCount(); ++i) {
1724 range->Union(OperandAt(i)->range());
1725 }
1726 return range;
1727 }
1728 } else {
1729 return HValue::InferRange(zone);
1730 }
1731}
1732
1733
1734Range* HAdd::InferRange(Zone* zone) {
1735 Representation r = representation();
1736 if (r.IsSmiOrInteger32()) {
1737 Range* a = left()->range();
1738 Range* b = right()->range();
1739 Range* res = a->Copy(zone);
1740 if (!res->AddAndCheckOverflow(r, b) ||
1741 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1742 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1743 ClearFlag(kCanOverflow);
1744 }
1745 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1746 !CheckFlag(kAllUsesTruncatingToInt32) &&
1747 a->CanBeMinusZero() && b->CanBeMinusZero());
1748 return res;
1749 } else {
1750 return HValue::InferRange(zone);
1751 }
1752}
1753
1754
1755Range* HSub::InferRange(Zone* zone) {
1756 Representation r = representation();
1757 if (r.IsSmiOrInteger32()) {
1758 Range* a = left()->range();
1759 Range* b = right()->range();
1760 Range* res = a->Copy(zone);
1761 if (!res->SubAndCheckOverflow(r, b) ||
1762 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1763 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1764 ClearFlag(kCanOverflow);
1765 }
1766 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1767 !CheckFlag(kAllUsesTruncatingToInt32) &&
1768 a->CanBeMinusZero() && b->CanBeZero());
1769 return res;
1770 } else {
1771 return HValue::InferRange(zone);
1772 }
1773}
1774
1775
1776Range* HMul::InferRange(Zone* zone) {
1777 Representation r = representation();
1778 if (r.IsSmiOrInteger32()) {
1779 Range* a = left()->range();
1780 Range* b = right()->range();
1781 Range* res = a->Copy(zone);
1782 if (!res->MulAndCheckOverflow(r, b) ||
1783 (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1784 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
1785 MulMinusOne())) {
1786 // Truncated int multiplication is too precise and therefore not the
1787 // same as converting to Double and back.
1788 // Handle truncated integer multiplication by -1 special.
1789 ClearFlag(kCanOverflow);
1790 }
1791 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1792 !CheckFlag(kAllUsesTruncatingToInt32) &&
1793 ((a->CanBeZero() && b->CanBeNegative()) ||
1794 (a->CanBeNegative() && b->CanBeZero())));
1795 return res;
1796 } else {
1797 return HValue::InferRange(zone);
1798 }
1799}
1800
1801
1802Range* HDiv::InferRange(Zone* zone) {
1803 if (representation().IsInteger32()) {
1804 Range* a = left()->range();
1805 Range* b = right()->range();
1806 Range* result = new(zone) Range();
1807 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1808 (a->CanBeMinusZero() ||
1809 (a->CanBeZero() && b->CanBeNegative())));
1810 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1811 ClearFlag(kCanOverflow);
1812 }
1813
1814 if (!b->CanBeZero()) {
1815 ClearFlag(kCanBeDivByZero);
1816 }
1817 return result;
1818 } else {
1819 return HValue::InferRange(zone);
1820 }
1821}
1822
1823
Range* HMathFloorOfDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    // The result can be -0 when the dividend is -0, or when 0 is divided by
    // a negative divisor — unless all uses truncate to int32.
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    // Clear the dividend-shape flags the lowering uses to pick cheaper
    // code paths when the range rules them out.
    if (!a->Includes(kMinInt)) {
      ClearFlag(kLeftCanBeMinInt);
    }

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->CanBePositive()) {
      ClearFlag(HValue::kLeftCanBePositive);
    }

    // kMinInt / -1 is the only int32 division that overflows.
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
1856
1857
// Returns |a| - 1 without the undefined behavior that computing |a| itself
// would trigger at kMinInt (signed overflow).
static int32_t AbsMinus1(int32_t a) {
  if (a < 0) return -(a + 1);
  return a - 1;
}
1861
1862
1863Range* HMod::InferRange(Zone* zone) {
1864 if (representation().IsInteger32()) {
1865 Range* a = left()->range();
1866 Range* b = right()->range();
1867
1868 // The magnitude of the modulus is bounded by the right operand.
1869 int32_t positive_bound = Max(AbsMinus1(b->lower()), AbsMinus1(b->upper()));
1870
1871 // The result of the modulo operation has the sign of its left operand.
1872 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
1873 Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
1874 a->CanBePositive() ? positive_bound : 0);
1875
1876 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1877 left_can_be_negative);
1878
1879 if (!a->CanBeNegative()) {
1880 ClearFlag(HValue::kLeftCanBeNegative);
1881 }
1882
1883 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1884 ClearFlag(HValue::kCanOverflow);
1885 }
1886
1887 if (!b->CanBeZero()) {
1888 ClearFlag(HValue::kCanBeDivByZero);
1889 }
1890 return result;
1891 } else {
1892 return HValue::InferRange(zone);
1893 }
1894}
1895
1896
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001897Range* HMathMinMax::InferRange(Zone* zone) {
1898 if (representation().IsSmiOrInteger32()) {
1899 Range* a = left()->range();
1900 Range* b = right()->range();
1901 Range* res = a->Copy(zone);
1902 if (operation_ == kMathMax) {
1903 res->CombinedMax(b);
1904 } else {
1905 DCHECK(operation_ == kMathMin);
1906 res->CombinedMin(b);
1907 }
1908 return res;
1909 } else {
1910 return HValue::InferRange(zone);
1911 }
1912}
1913
1914
void HPushArguments::AddInput(HValue* value) {
  // Grow the operand list by one slot, then wire the value in through
  // SetOperandAt so use lists stay consistent.
  inputs_.Add(NULL, value->block()->zone());
  SetOperandAt(OperandCount() - 1, value);
}
1919
1920
1921std::ostream& HPhi::PrintTo(std::ostream& os) const { // NOLINT
1922 os << "[";
1923 for (int i = 0; i < OperandCount(); ++i) {
1924 os << " " << NameOf(OperandAt(i)) << " ";
1925 }
1926 return os << " uses" << UseCount()
1927 << representation_from_indirect_uses().Mnemonic() << " "
1928 << TypeOf(this) << "]";
1929}
1930
1931
void HPhi::AddInput(HValue* value) {
  // Grow the operand list by one slot, then wire the value in through
  // SetOperandAt so use lists stay consistent.
  inputs_.Add(NULL, value->block()->zone());
  SetOperandAt(OperandCount() - 1, value);
  // Mark phis that may have 'arguments' directly or indirectly as an operand.
  if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
    SetFlag(kIsArguments);
  }
}
1940
1941
1942bool HPhi::HasRealUses() {
1943 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
1944 if (!it.value()->IsPhi()) return true;
1945 }
1946 return false;
1947}
1948
1949
1950HValue* HPhi::GetRedundantReplacement() {
1951 HValue* candidate = NULL;
1952 int count = OperandCount();
1953 int position = 0;
1954 while (position < count && candidate == NULL) {
1955 HValue* current = OperandAt(position++);
1956 if (current != this) candidate = current;
1957 }
1958 while (position < count) {
1959 HValue* current = OperandAt(position++);
1960 if (current != this && current != candidate) return NULL;
1961 }
1962 DCHECK(candidate != this);
1963 return candidate;
1964}
1965
1966
void HPhi::DeleteFromGraph() {
  // Removing the phi from its block also detaches it (block() becomes NULL).
  DCHECK(block() != NULL);
  block()->RemovePhi(this);
  DCHECK(block() == NULL);
}
1972
1973
void HPhi::InitRealUses(int phi_id) {
  // Initialize real uses.
  phi_id_ = phi_id;
  // Compute a conservative approximation of truncating uses before inferring
  // representations. The proper, exact computation will be done later, when
  // inserting representation changes.  Start fully truncating and clear the
  // flags as soon as one non-truncating use is seen.
  SetFlag(kTruncatingToSmi);
  SetFlag(kTruncatingToInt32);
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* value = it.value();
    if (!value->IsPhi()) {
      // Accumulate representation feedback from non-phi uses only; phi-to-phi
      // feedback is merged separately via AddNonPhiUsesFrom.
      Representation rep = value->observed_input_representation(it.index());
      representation_from_non_phi_uses_ =
          representation_from_non_phi_uses().generalize(rep);
      if (rep.IsSmi() || rep.IsInteger32() || rep.IsDouble()) {
        has_type_feedback_from_uses_ = true;
      }

      if (FLAG_trace_representation) {
        PrintF("#%d Phi is used by real #%d %s as %s\n",
               id(), value->id(), value->Mnemonic(), rep.Mnemonic());
      }
      // Simulates don't count against truncation: they only record state
      // for deoptimization.
      if (!value->IsSimulate()) {
        if (!value->CheckFlag(kTruncatingToSmi)) {
          ClearFlag(kTruncatingToSmi);
        }
        if (!value->CheckFlag(kTruncatingToInt32)) {
          ClearFlag(kTruncatingToInt32);
        }
      }
    }
  }
}
2007
2008
void HPhi::AddNonPhiUsesFrom(HPhi* other) {
  // Merge the representation feedback another (connected) phi collected
  // from its non-phi uses into this phi's indirect-use representation.
  if (FLAG_trace_representation) {
    PrintF(
        "generalizing use representation '%s' of #%d Phi "
        "with uses of #%d Phi '%s'\n",
        representation_from_indirect_uses().Mnemonic(), id(), other->id(),
        other->representation_from_non_phi_uses().Mnemonic());
  }

  representation_from_indirect_uses_ =
      representation_from_indirect_uses().generalize(
          other->representation_from_non_phi_uses());
}
2022
2023
void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
  // Fold the given simulates into this one, consuming the list back-to-front.
  // Assigned slots keep the first (this simulate's) binding; pushed values
  // are cancelled against pending pops before being re-pushed.
  while (!list->is_empty()) {
    HSimulate* from = list->RemoveLast();
    ZoneList<HValue*>* from_values = &from->values_;
    for (int i = 0; i < from_values->length(); ++i) {
      if (from->HasAssignedIndexAt(i)) {
        int index = from->GetAssignedIndexAt(i);
        // An existing binding for this slot wins over the merged one.
        if (HasValueForIndex(index)) continue;
        AddAssignedValue(index, from_values->at(i));
      } else {
        if (pop_count_ > 0) {
          // A pending pop cancels the merged push.
          pop_count_--;
        } else {
          AddPushedValue(from_values->at(i));
        }
      }
    }
    pop_count_ += from->pop_count_;
    from->DeleteAndReplaceWith(NULL);
  }
}
2045
2046
std::ostream& HSimulate::PrintDataTo(std::ostream& os) const {  // NOLINT
  // Trace output: AST id, pop count, then the recorded environment values
  // from the top of the simulated stack downwards.
  os << "id=" << ast_id().ToInt();
  if (pop_count_ > 0) os << " pop " << pop_count_;
  if (values_.length() > 0) {
    if (pop_count_ > 0) os << " /";
    for (int i = values_.length() - 1; i >= 0; --i) {
      if (HasAssignedIndexAt(i)) {
        os << " var[" << GetAssignedIndexAt(i) << "] = ";
      } else {
        os << " push ";
      }
      os << NameOf(values_[i]);
      if (i > 0) os << ",";
    }
  }
  return os;
}
2064
2065
void HSimulate::ReplayEnvironment(HEnvironment* env) {
  // Apply this simulate's recorded effects (pops, slot assignments, pushes)
  // to the given environment.  Guarded so it runs at most once.
  if (is_done_with_replay()) return;
  DCHECK(env != NULL);
  env->set_ast_id(ast_id());
  env->Drop(pop_count());
  // Values were recorded top-of-stack first, so replay back-to-front.
  for (int i = values()->length() - 1; i >= 0; --i) {
    HValue* value = values()->at(i);
    if (HasAssignedIndexAt(i)) {
      env->Bind(GetAssignedIndexAt(i), value);
    } else {
      env->Push(value);
    }
  }
  set_done_with_replay();
}
2081
2082
// Recursively replaces, in the given value list, every captured object
// carrying `other`'s capture id with `other` itself; other captured objects
// are searched for nested matches.
static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
                                    HCapturedObject* other) {
  for (int i = 0; i < values->length(); ++i) {
    HValue* value = values->at(i);
    if (value->IsCapturedObject()) {
      if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
        values->at(i) = other;
      } else {
        ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
      }
    }
  }
}
2096
2097
2098// Replay captured objects by replacing all captured objects with the
2099// same capture id in the current and all outer environments.
2100void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2101 DCHECK(env != NULL);
2102 while (env != NULL) {
2103 ReplayEnvironmentNested(env->values(), this);
2104 env = env->outer();
2105 }
2106}
2107
2108
2109std::ostream& HCapturedObject::PrintDataTo(std::ostream& os) const { // NOLINT
2110 os << "#" << capture_id() << " ";
2111 return HDematerializedObject::PrintDataTo(os);
2112}
2113
2114
// Records |return_target| as a block that returns from the inlined
// function jump to.
void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
                                         Zone* zone) {
  DCHECK(return_target->IsInlineReturnTarget());
  return_targets_.Add(return_target, zone);
}
2120
2121
2122std::ostream& HEnterInlined::PrintDataTo(std::ostream& os) const { // NOLINT
Ben Murdochda12d292016-06-02 14:46:10 +01002123 os << function()->debug_name()->ToCString().get();
2124 if (syntactic_tail_call_mode() == TailCallMode::kAllow) {
2125 os << ", JSTailCall";
2126 }
2127 return os;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002128}
2129
2130
2131static bool IsInteger32(double value) {
2132 if (value >= std::numeric_limits<int32_t>::min() &&
2133 value <= std::numeric_limits<int32_t>::max()) {
2134 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2135 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2136 }
2137 return false;
2138}
2139
2140
// Constructs the special "hole NaN" double constant. The hole is a
// specific NaN bit pattern, so its bits must be copied verbatim: a plain
// double assignment could silently canonicalize the NaN payload.
HConstant::HConstant(Special special)
    : HTemplateInstruction<0>(HType::TaggedNumber()),
      object_(Handle<Object>::null()),
      object_map_(Handle<Map>::null()),
      bit_field_(HasDoubleValueField::encode(true) |
                 InstanceTypeField::encode(kUnknownInstanceType)),
      int32_value_(0) {
  DCHECK_EQ(kHoleNaN, special);
  // memcpy preserves the exact bit pattern of the hole NaN.
  std::memcpy(&double_value_, &kHoleNanInt64, sizeof(double_value_));
  Initialize(Representation::Double());
}
2152
2153
// Constructs a constant from an arbitrary object handle. Heap-object
// properties (instance type, undetectable/callable bits, stable map) and
// numeric properties (int32/smi/double-ness) are derived eagerly here so
// later queries never need to touch the heap.
HConstant::HConstant(Handle<Object> object, Representation r)
    : HTemplateInstruction<0>(HType::FromValue(object)),
      object_(Unique<Object>::CreateUninitialized(object)),
      object_map_(Handle<Map>::null()),
      bit_field_(
          HasStableMapValueField::encode(false) |
          HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
          HasDoubleValueField::encode(false) |
          HasExternalReferenceValueField::encode(false) |
          IsNotInNewSpaceField::encode(true) |
          BooleanValueField::encode(object->BooleanValue()) |
          IsUndetectableField::encode(false) | IsCallableField::encode(false) |
          InstanceTypeField::encode(kUnknownInstanceType)) {
  if (object->IsHeapObject()) {
    Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
    Isolate* isolate = heap_object->GetIsolate();
    Handle<Map> map(heap_object->map(), isolate);
    bit_field_ = IsNotInNewSpaceField::update(
        bit_field_, !isolate->heap()->InNewSpace(*object));
    bit_field_ = InstanceTypeField::update(bit_field_, map->instance_type());
    bit_field_ =
        IsUndetectableField::update(bit_field_, map->is_undetectable());
    bit_field_ = IsCallableField::update(bit_field_, map->is_callable());
    // Only stable maps may be remembered; an unstable map can change later.
    if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map);
    bit_field_ = HasStableMapValueField::update(
        bit_field_,
        HasMapValue() && Handle<Map>::cast(heap_object)->is_stable());
  }
  if (object->IsNumber()) {
    double n = object->Number();
    bool has_int32_value = IsInteger32(n);
    bit_field_ = HasInt32ValueField::update(bit_field_, has_int32_value);
    // int32_value_ is only meaningful when has_int32_value is set.
    int32_value_ = DoubleToInt32(n);
    bit_field_ = HasSmiValueField::update(
        bit_field_, has_int32_value && Smi::IsValid(int32_value_));
    double_value_ = n;
    bit_field_ = HasDoubleValueField::update(bit_field_, true);
  }

  Initialize(r);
}
2195
2196
// Constructs a constant from pre-computed object properties. Used when
// copying a constant to another representation, so nothing is re-derived
// from the heap here.
HConstant::HConstant(Unique<Object> object, Unique<Map> object_map,
                     bool has_stable_map_value, Representation r, HType type,
                     bool is_not_in_new_space, bool boolean_value,
                     bool is_undetectable, InstanceType instance_type)
    : HTemplateInstruction<0>(type),
      object_(object),
      object_map_(object_map),
      bit_field_(HasStableMapValueField::encode(has_stable_map_value) |
                 HasSmiValueField::encode(false) |
                 HasInt32ValueField::encode(false) |
                 HasDoubleValueField::encode(false) |
                 HasExternalReferenceValueField::encode(false) |
                 IsNotInNewSpaceField::encode(is_not_in_new_space) |
                 BooleanValueField::encode(boolean_value) |
                 IsUndetectableField::encode(is_undetectable) |
                 InstanceTypeField::encode(instance_type)) {
  DCHECK(!object.handle().is_null());
  // Numbers must use the numeric constructors so the numeric bits are set.
  DCHECK(!type.IsTaggedNumber() || type.IsNone());
  Initialize(r);
}
2217
2218
// Constructs an int32 constant. |object| may carry a pre-existing heap
// object (e.g. a HeapNumber) holding the same value.
HConstant::HConstant(int32_t integer_value, Representation r,
                     bool is_not_in_new_space, Unique<Object> object)
    : object_(object),
      object_map_(Handle<Map>::null()),
      bit_field_(HasStableMapValueField::encode(false) |
                 HasSmiValueField::encode(Smi::IsValid(integer_value)) |
                 HasInt32ValueField::encode(true) |
                 HasDoubleValueField::encode(true) |
                 HasExternalReferenceValueField::encode(false) |
                 IsNotInNewSpaceField::encode(is_not_in_new_space) |
                 BooleanValueField::encode(integer_value != 0) |
                 IsUndetectableField::encode(false) |
                 InstanceTypeField::encode(kUnknownInstanceType)),
      int32_value_(integer_value),
      double_value_(FastI2D(integer_value)) {
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber. See crbug.com/349878.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = HasSmiValue() && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
2241
2242
// Constructs a double constant; int32/smi bits are derived when the value
// round-trips through int32. |object| may carry a pre-existing HeapNumber.
HConstant::HConstant(double double_value, Representation r,
                     bool is_not_in_new_space, Unique<Object> object)
    : object_(object),
      object_map_(Handle<Map>::null()),
      bit_field_(HasStableMapValueField::encode(false) |
                 HasInt32ValueField::encode(IsInteger32(double_value)) |
                 HasDoubleValueField::encode(true) |
                 HasExternalReferenceValueField::encode(false) |
                 IsNotInNewSpaceField::encode(is_not_in_new_space) |
                 // NaN and 0.0 (either sign) are the falsy doubles.
                 BooleanValueField::encode(double_value != 0 &&
                                           !std::isnan(double_value)) |
                 IsUndetectableField::encode(false) |
                 InstanceTypeField::encode(kUnknownInstanceType)),
      int32_value_(DoubleToInt32(double_value)),
      double_value_(double_value) {
  bit_field_ = HasSmiValueField::update(
      bit_field_, HasInteger32Value() && Smi::IsValid(int32_value_));
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber. See crbug.com/349878.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = HasSmiValue() && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
2267
2268
// Constructs a constant holding an external (non-heap) address. Such
// constants are always truthy and never live in new space.
HConstant::HConstant(ExternalReference reference)
    : HTemplateInstruction<0>(HType::Any()),
      object_(Unique<Object>(Handle<Object>::null())),
      object_map_(Handle<Map>::null()),
      bit_field_(
          HasStableMapValueField::encode(false) |
          HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
          HasDoubleValueField::encode(false) |
          HasExternalReferenceValueField::encode(true) |
          IsNotInNewSpaceField::encode(true) | BooleanValueField::encode(true) |
          IsUndetectableField::encode(false) |
          InstanceTypeField::encode(kUnknownInstanceType)),
      external_reference_value_(reference) {
  Initialize(Representation::External());
}
2284
2285
// Finalizes construction: picks the best representation when none was
// requested, and normalizes internal state for Smi/Integer32 constants.
void HConstant::Initialize(Representation r) {
  if (r.IsNone()) {
    // Choose the most specific representation the value supports.
    if (HasSmiValue() && SmiValuesAre31Bits()) {
      r = Representation::Smi();
    } else if (HasInteger32Value()) {
      r = Representation::Integer32();
    } else if (HasDoubleValue()) {
      r = Representation::Double();
    } else if (HasExternalReferenceValue()) {
      r = Representation::External();
    } else {
      Handle<Object> object = object_.handle();
      if (object->IsJSObject()) {
        // Try to eagerly migrate JSObjects that have deprecated maps.
        Handle<JSObject> js_object = Handle<JSObject>::cast(object);
        if (js_object->map()->is_deprecated()) {
          JSObject::TryMigrateInstance(js_object);
        }
      }
      r = Representation::Tagged();
    }
  }
  if (r.IsSmi()) {
    // If we have an existing handle, zap it, because it might be a heap
    // number which we must not re-use when copying this HConstant to
    // Tagged representation later, because having Smi representation now
    // could cause heap object checks not to get emitted.
    object_ = Unique<Object>(Handle<Object>::null());
  }
  if (r.IsSmiOrInteger32() && object_.handle().is_null()) {
    // If it's not a heap object, it can't be in new space.
    bit_field_ = IsNotInNewSpaceField::update(bit_field_, true);
  }
  set_representation(r);
  SetFlag(kUseGVN);
}
2322
2323
// Returns true if this constant is an immortal, immovable heap object
// (roots, internalized strings, string maps): such objects never move, so
// embedded references to them need no relocation.
bool HConstant::ImmortalImmovable() const {
  if (HasInteger32Value()) {
    return false;
  }
  if (HasDoubleValue()) {
    // Special doubles (e.g. the hole NaN) are backed by immortal roots.
    if (IsSpecialDouble()) {
      return true;
    }
    return false;
  }
  if (HasExternalReferenceValue()) {
    return false;
  }

  DCHECK(!object_.handle().is_null());
  Heap* heap = isolate()->heap();
  // minus_zero/nan are covered by the special-double case above.
  DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
  DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
  // Expands to a chain of IsKnownGlobal checks against every immortal
  // immovable root, internalized string, and string map, OR-ed together.
  return
#define IMMORTAL_IMMOVABLE_ROOT(name) \
  object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
      IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
#undef IMMORTAL_IMMOVABLE_ROOT
#define INTERNALIZED_STRING(name, value) \
      object_.IsKnownGlobal(heap->name()) ||
      INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
#undef INTERNALIZED_STRING
#define STRING_TYPE(NAME, size, name, Name) \
      object_.IsKnownGlobal(heap->name##_map()) ||
      STRING_TYPE_LIST(STRING_TYPE)
#undef STRING_TYPE
      false;
}
2357
2358
2359bool HConstant::EmitAtUses() {
2360 DCHECK(IsLinked());
2361 if (block()->graph()->has_osr() &&
2362 block()->graph()->IsStandardConstant(this)) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002363 return true;
2364 }
2365 if (HasNoUses()) return true;
2366 if (IsCell()) return false;
2367 if (representation().IsDouble()) return false;
2368 if (representation().IsExternal()) return false;
2369 return true;
2370}
2371
2372
// Returns a fresh copy of this constant in representation |r|, or NULL
// when the constant has no value representable in |r|.
HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
  if (r.IsSmi() && !HasSmiValue()) return NULL;
  if (r.IsInteger32() && !HasInteger32Value()) return NULL;
  if (r.IsDouble() && !HasDoubleValue()) return NULL;
  if (r.IsExternal() && !HasExternalReferenceValue()) return NULL;
  // Prefer the most specific numeric constructor that applies.
  if (HasInteger32Value()) {
    return new (zone) HConstant(int32_value_, r, NotInNewSpace(), object_);
  }
  if (HasDoubleValue()) {
    return new (zone) HConstant(double_value_, r, NotInNewSpace(), object_);
  }
  if (HasExternalReferenceValue()) {
    return new(zone) HConstant(external_reference_value_);
  }
  DCHECK(!object_.handle().is_null());
  return new (zone) HConstant(object_, object_map_, HasStableMapValue(), r,
                              type_, NotInNewSpace(), BooleanValue(),
                              IsUndetectable(), GetInstanceType());
}
2392
2393
2394Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2395 HConstant* res = NULL;
2396 if (HasInteger32Value()) {
2397 res = new (zone) HConstant(int32_value_, Representation::Integer32(),
2398 NotInNewSpace(), object_);
2399 } else if (HasDoubleValue()) {
2400 res = new (zone)
2401 HConstant(DoubleToInt32(double_value_), Representation::Integer32(),
2402 NotInNewSpace(), object_);
2403 }
2404 return res != NULL ? Just(res) : Nothing<HConstant*>();
2405}
2406
2407
// Returns a numeric copy of this constant using ToNumber-style conversion
// (booleans to 0/1, undefined to NaN, null to 0, strings via
// String::ToNumber), or Nothing when the value is none of those types.
Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Isolate* isolate,
                                                   Zone* zone) {
  HConstant* res = NULL;
  Handle<Object> handle = this->handle(isolate);
  if (handle->IsBoolean()) {
    res = handle->BooleanValue() ?
      new(zone) HConstant(1) : new(zone) HConstant(0);
  } else if (handle->IsUndefined()) {
    res = new (zone) HConstant(std::numeric_limits<double>::quiet_NaN());
  } else if (handle->IsNull()) {
    res = new(zone) HConstant(0);
  } else if (handle->IsString()) {
    res = new(zone) HConstant(String::ToNumber(Handle<String>::cast(handle)));
  }
  return res != NULL ? Just(res) : Nothing<HConstant*>();
}
2424
2425
// Prints the most specific value this constant holds, plus map/new-space
// annotations for heap objects.
std::ostream& HConstant::PrintDataTo(std::ostream& os) const {  // NOLINT
  if (HasInteger32Value()) {
    os << int32_value_ << " ";
  } else if (HasDoubleValue()) {
    os << double_value_ << " ";
  } else if (HasExternalReferenceValue()) {
    os << reinterpret_cast<void*>(external_reference_value_.address()) << " ";
  } else {
    // The handle() method is silently and lazily mutating the object.
    Handle<Object> h = const_cast<HConstant*>(this)->handle(isolate());
    os << Brief(*h) << " ";
    if (HasStableMapValue()) os << "[stable-map] ";
    if (HasObjectMap()) os << "[map " << *ObjectMap().handle() << "] ";
  }
  if (!NotInNewSpace()) os << "[new space] ";
  return os;
}
2443
2444
2445std::ostream& HBinaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
2446 os << NameOf(left()) << " " << NameOf(right());
2447 if (CheckFlag(kCanOverflow)) os << " !";
2448 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
2449 return os;
2450}
2451
2452
// Infers this operation's representation from its inputs, then refines it
// using either the uses or the observed output feedback.
void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");

  // A Smi result with non-Smi uses must widen to Integer32.
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }

  // Without output feedback fall back to what the uses want; otherwise
  // trust the observed output representation.
  if (observed_output_representation_.IsNone()) {
    new_rep = RepresentationFromUses();
    UpdateRepresentation(new_rep, h_infer, "uses");
  } else {
    new_rep = RepresentationFromOutput();
    UpdateRepresentation(new_rep, h_infer, "output");
  }
}
2471
2472
2473Representation HBinaryOperation::RepresentationFromInputs() {
2474 // Determine the worst case of observed input representations and
2475 // the currently assumed output representation.
2476 Representation rep = representation();
2477 for (int i = 1; i <= 2; ++i) {
2478 rep = rep.generalize(observed_input_representation(i));
2479 }
2480 // If any of the actual input representation is more general than what we
2481 // have so far but not Tagged, use that representation instead.
2482 Representation left_rep = left()->representation();
2483 Representation right_rep = right()->representation();
2484 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
2485 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
2486
2487 return rep;
2488}
2489
2490
// Returns true when the observed output representation may be ignored:
// all uses truncate to the current int32/smi representation anyway, so
// widening (e.g. to Double) would gain nothing.
bool HBinaryOperation::IgnoreObservedOutputRepresentation(
    Representation current_rep) {
  return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
          (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
         // Mul in Integer32 mode would be too precise.
         (!this->IsMul() || HMul::cast(this)->MulMinusOne());
}
2498
2499
2500Representation HBinaryOperation::RepresentationFromOutput() {
2501 Representation rep = representation();
2502 // Consider observed output representation, but ignore it if it's Double,
2503 // this instruction is not a division, and all its uses are truncating
2504 // to Integer32.
2505 if (observed_output_representation_.is_more_general_than(rep) &&
2506 !IgnoreObservedOutputRepresentation(rep)) {
2507 return observed_output_representation_;
2508 }
2509 return Representation::None();
2510}
2511
2512
// Forces |r| as the observed representation of both inputs, then adopts
// it as this operation's own representation via HValue.
void HBinaryOperation::AssumeRepresentation(Representation r) {
  set_observed_input_representation(1, r);
  set_observed_input_representation(2, r);
  HValue::AssumeRepresentation(r);
}
2518
2519
// Min/max derives its representation from its inputs only; uses
// deliberately have no influence.
void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  // Do not care about uses.
}
2526
2527
// Infers a value range for a bitwise operation. XOR gets a dedicated
// bound computation from the operands' ranges; AND/OR conservatively
// bound the result via the combined sign-extended bit masks.
Range* HBitwise::InferRange(Zone* zone) {
  if (op() == Token::BIT_XOR) {
    if (left()->HasRange() && right()->HasRange()) {
      // The maximum value has the high bit, and all bits below, set:
      // (1 << high) - 1.
      // If the range can be negative, the minimum int is a negative number with
      // the high bit, and all bits below, unset:
      // -(1 << high).
      // If it cannot be negative, conservatively choose 0 as minimum int.
      int64_t left_upper = left()->range()->upper();
      int64_t left_lower = left()->range()->lower();
      int64_t right_upper = right()->range()->upper();
      int64_t right_lower = right()->range()->lower();

      // Fold negative bounds to their bitwise complements so that the
      // significant-bit computation below sees only non-negative values.
      if (left_upper < 0) left_upper = ~left_upper;
      if (left_lower < 0) left_lower = ~left_lower;
      if (right_upper < 0) right_upper = ~right_upper;
      if (right_lower < 0) right_lower = ~right_lower;

      int high = MostSignificantBit(
          static_cast<uint32_t>(
              left_upper | left_lower | right_upper | right_lower));

      int64_t limit = 1;
      limit <<= high;
      int32_t min = (left()->range()->CanBeNegative() ||
                     right()->range()->CanBeNegative())
                    ? static_cast<int32_t>(-limit) : 0;
      return new(zone) Range(min, static_cast<int32_t>(limit - 1));
    }
    // No operand ranges: fall back, but XOR can never produce -0.
    Range* result = HValue::InferRange(zone);
    result->set_can_be_minus_zero(false);
    return result;
  }
  // AND/OR: combine the operand masks; a missing range means "all bits".
  const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
  int32_t left_mask = (left()->range() != NULL)
      ? left()->range()->Mask()
      : kDefaultMask;
  int32_t right_mask = (right()->range() != NULL)
      ? right()->range()->Mask()
      : kDefaultMask;
  int32_t result_mask = (op() == Token::BIT_AND)
      ? left_mask & right_mask
      : left_mask | right_mask;
  // A non-negative mask bounds the result to [0, mask].
  if (result_mask >= 0) return new(zone) Range(0, result_mask);

  Range* result = HValue::InferRange(zone);
  result->set_can_be_minus_zero(false);
  return result;
}
2578
2579
2580Range* HSar::InferRange(Zone* zone) {
2581 if (right()->IsConstant()) {
2582 HConstant* c = HConstant::cast(right());
2583 if (c->HasInteger32Value()) {
2584 Range* result = (left()->range() != NULL)
2585 ? left()->range()->Copy(zone)
2586 : new(zone) Range();
2587 result->Sar(c->Integer32Value());
2588 return result;
2589 }
2590 }
2591 return HValue::InferRange(zone);
2592}
2593
2594
2595Range* HShr::InferRange(Zone* zone) {
2596 if (right()->IsConstant()) {
2597 HConstant* c = HConstant::cast(right());
2598 if (c->HasInteger32Value()) {
2599 int shift_count = c->Integer32Value() & 0x1f;
2600 if (left()->range()->CanBeNegative()) {
2601 // Only compute bounds if the result always fits into an int32.
2602 return (shift_count >= 1)
2603 ? new(zone) Range(0,
2604 static_cast<uint32_t>(0xffffffff) >> shift_count)
2605 : new(zone) Range();
2606 } else {
2607 // For positive inputs we can use the >> operator.
2608 Range* result = (left()->range() != NULL)
2609 ? left()->range()->Copy(zone)
2610 : new(zone) Range();
2611 result->Sar(c->Integer32Value());
2612 return result;
2613 }
2614 }
2615 }
2616 return HValue::InferRange(zone);
2617}
2618
2619
2620Range* HShl::InferRange(Zone* zone) {
2621 if (right()->IsConstant()) {
2622 HConstant* c = HConstant::cast(right());
2623 if (c->HasInteger32Value()) {
2624 Range* result = (left()->range() != NULL)
2625 ? left()->range()->Copy(zone)
2626 : new(zone) Range();
2627 result->Shl(c->Integer32Value());
2628 return result;
2629 }
2630 }
2631 return HValue::InferRange(zone);
2632}
2633
2634
2635Range* HLoadNamedField::InferRange(Zone* zone) {
2636 if (access().representation().IsInteger8()) {
2637 return new(zone) Range(kMinInt8, kMaxInt8);
2638 }
2639 if (access().representation().IsUInteger8()) {
2640 return new(zone) Range(kMinUInt8, kMaxUInt8);
2641 }
2642 if (access().representation().IsInteger16()) {
2643 return new(zone) Range(kMinInt16, kMaxInt16);
2644 }
2645 if (access().representation().IsUInteger16()) {
2646 return new(zone) Range(kMinUInt16, kMaxUInt16);
2647 }
2648 if (access().IsStringLength()) {
2649 return new(zone) Range(0, String::kMaxLength);
2650 }
2651 return HValue::InferRange(zone);
2652}
2653
2654
2655Range* HLoadKeyed::InferRange(Zone* zone) {
2656 switch (elements_kind()) {
2657 case INT8_ELEMENTS:
2658 return new(zone) Range(kMinInt8, kMaxInt8);
2659 case UINT8_ELEMENTS:
2660 case UINT8_CLAMPED_ELEMENTS:
2661 return new(zone) Range(kMinUInt8, kMaxUInt8);
2662 case INT16_ELEMENTS:
2663 return new(zone) Range(kMinInt16, kMaxInt16);
2664 case UINT16_ELEMENTS:
2665 return new(zone) Range(kMinUInt16, kMaxUInt16);
2666 default:
2667 return HValue::InferRange(zone);
2668 }
2669}
2670
2671
2672std::ostream& HCompareGeneric::PrintDataTo(std::ostream& os) const { // NOLINT
2673 os << Token::Name(token()) << " ";
2674 return HBinaryOperation::PrintDataTo(os);
2675}
2676
2677
2678std::ostream& HStringCompareAndBranch::PrintDataTo(
2679 std::ostream& os) const { // NOLINT
2680 os << Token::Name(token()) << " ";
2681 return HControlInstruction::PrintDataTo(os);
2682}
2683
2684
2685std::ostream& HCompareNumericAndBranch::PrintDataTo(
2686 std::ostream& os) const { // NOLINT
2687 os << Token::Name(token()) << " " << NameOf(left()) << " " << NameOf(right());
2688 return HControlInstruction::PrintDataTo(os);
2689}
2690
2691
2692std::ostream& HCompareObjectEqAndBranch::PrintDataTo(
2693 std::ostream& os) const { // NOLINT
2694 os << NameOf(left()) << " " << NameOf(right());
2695 return HControlInstruction::PrintDataTo(os);
2696}
2697
2698
2699bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2700 if (known_successor_index() != kNoKnownSuccessorIndex) {
2701 *block = SuccessorAt(known_successor_index());
2702 return true;
2703 }
2704 if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
2705 *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
2706 ? FirstSuccessor() : SecondSuccessor();
2707 return true;
2708 }
2709 *block = NULL;
2710 return false;
2711}
2712
2713
// Statically resolves the is-string branch when the answer is known: via
// a pre-computed successor, a constant input, or a value whose HType
// decides string-ness either way. Returns false when undecidable.
bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (known_successor_index() != kNoKnownSuccessorIndex) {
    *block = SuccessorAt(known_successor_index());
    return true;
  }
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->HasStringValue()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  if (value()->type().IsString()) {
    *block = FirstSuccessor();
    return true;
  }
  // These types are definitely not strings.
  if (value()->type().IsSmi() ||
      value()->type().IsNull() ||
      value()->type().IsBoolean() ||
      value()->type().IsUndefined() ||
      value()->type().IsJSReceiver()) {
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
2739
2740
// Statically resolves the is-undetectable branch when the answer is
// known: from a constant input, or from an HType that implies the result
// (null/undefined are undetectable; booleans, smis, strings and
// JSReceivers are not). Returns false when undecidable.
bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->IsUndetectable()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  if (value()->type().IsNull() || value()->type().IsUndefined()) {
    *block = FirstSuccessor();
    return true;
  }
  if (value()->type().IsBoolean() ||
      value()->type().IsSmi() ||
      value()->type().IsString() ||
      value()->type().IsJSReceiver()) {
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
2761
2762
2763bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2764 if (FLAG_fold_constants && value()->IsConstant()) {
2765 InstanceType type = HConstant::cast(value())->GetInstanceType();
2766 *block = (from_ <= type) && (type <= to_)
2767 ? FirstSuccessor() : SecondSuccessor();
2768 return true;
2769 }
2770 *block = NULL;
2771 return false;
2772}
2773
2774
// The hole check must inspect the value in whatever representation it
// already has, so this simply mirrors the input's representation.
void HCompareHoleAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}
2779
2780
2781bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2782 if (left() == right() &&
2783 left()->representation().IsSmiOrInteger32()) {
2784 *block = (token() == Token::EQ ||
2785 token() == Token::EQ_STRICT ||
2786 token() == Token::LTE ||
2787 token() == Token::GTE)
2788 ? FirstSuccessor() : SecondSuccessor();
2789 return true;
2790 }
2791 *block = NULL;
2792 return false;
2793}
2794
2795
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002796std::ostream& HGoto::PrintDataTo(std::ostream& os) const { // NOLINT
2797 return os << *SuccessorAt(0);
2798}
2799
2800
// Picks a representation for the numeric comparison from observed and
// actual operand representations; non-int feedback forces Double, in
// which case undefined-as-NaN is only safe for ordered relational ops.
void HCompareNumericAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  // NOTE(review): indices 0/1 here, unlike HBinaryOperation's 1/2 —
  // presumably this class stores its observed representations 0-based;
  // confirm against the class declaration.
  Representation observed_left = observed_input_representation(0);
  Representation observed_right = observed_input_representation(1);

  Representation rep = Representation::None();
  rep = rep.generalize(observed_left);
  rep = rep.generalize(observed_right);
  if (rep.IsNone() || rep.IsSmiOrInteger32()) {
    // Integer-ish feedback: also fold in non-Tagged actual representations.
    if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
    if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
  } else {
    rep = Representation::Double();
  }

  if (rep.IsDouble()) {
    // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
    // and !=) have special handling of undefined, e.g. undefined == undefined
    // is 'true'. Relational comparisons have a different semantic, first
    // calling ToPrimitive() on their arguments. The standard Crankshaft
    // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
    // inputs are doubles caused 'undefined' to be converted to NaN. That's
    // compatible out-of-the box with ordered relational comparisons (<, >, <=,
    // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
    // it is not consistent with the spec. For example, it would cause undefined
    // == undefined (should be true) to be evaluated as NaN == NaN
    // (false). Therefore, any comparisons other than ordered relational
    // comparisons must cause a deopt when one of their arguments is undefined.
    // See also v8:1434
    if (Token::IsOrderedRelationalCompareOp(token_)) {
      SetFlag(kAllowUndefinedAsNaN);
    }
  }
  ChangeRepresentation(rep);
}
2838
2839
2840std::ostream& HParameter::PrintDataTo(std::ostream& os) const { // NOLINT
2841 return os << index();
2842}
2843
2844
2845std::ostream& HLoadNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
2846 os << NameOf(object()) << access_;
2847
2848 if (maps() != NULL) {
2849 os << " [" << *maps()->at(0).handle();
2850 for (int i = 1; i < maps()->size(); ++i) {
2851 os << "," << *maps()->at(i).handle();
2852 }
2853 os << "]";
2854 }
2855
2856 if (HasDependency()) os << " " << NameOf(dependency());
2857 return os;
2858}
2859
2860
2861std::ostream& HLoadNamedGeneric::PrintDataTo(
2862 std::ostream& os) const { // NOLINT
2863 Handle<String> n = Handle<String>::cast(name());
2864 return os << NameOf(object()) << "." << n->ToCString().get();
2865}
2866
2867
// Prints "elements[.kind][key(+offset)] dependency check_hole" for this
// keyed load.
std::ostream& HLoadKeyed::PrintDataTo(std::ostream& os) const {  // NOLINT
  if (!is_fixed_typed_array()) {
    os << NameOf(elements());
  } else {
    DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
    os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
  }

  os << "[" << NameOf(key());
  // A dehoisted key carries a constant base offset folded out of the key.
  if (IsDehoisted()) os << " + " << base_offset();
  os << "]";

  if (HasDependency()) os << " " << NameOf(dependency());
  if (RequiresHoleCheck()) os << " check_hole";
  return os;
}
2885
2886
// Attempts to grow the base offset by |increase_by_value|, returning
// false (and leaving the offset unchanged) if the sum would overflow
// uint32 or exceed the bit field's capacity.
bool HLoadKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
  // The base offset is usually simply the size of the array header, except
  // that dehoisting adds an additional offset due to an array-index-key
  // manipulation, in which case it becomes (array header size +
  // constant-offset-from-key * kPointerSize).
  uint32_t base_offset = BaseOffsetField::decode(bit_field_);
  v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset;
  addition_result += increase_by_value;
  if (!addition_result.IsValid()) return false;
  base_offset = addition_result.ValueOrDie();
  if (!BaseOffsetField::is_valid(base_offset)) return false;
  bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
  return true;
}
2901
2902
// Returns true if every use of this load can cope with receiving the
// hole value directly, so no hole check needs to be emitted.
bool HLoadKeyed::UsesMustHandleHole() const {
  // Packed and typed-array backing stores never contain holes.
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  if (IsFixedTypedArrayElementsKind(elements_kind())) {
    return false;
  }

  if (hole_mode() == ALLOW_RETURN_HOLE) {
    // Double holes surface as NaN, so uses must tolerate undefined-as-NaN.
    if (IsFastDoubleElementsKind(elements_kind())) {
      return AllUsesCanTreatHoleAsNaN();
    }
    return true;
  }

  if (IsFastDoubleElementsKind(elements_kind())) {
    return false;
  }

  // Holes are only returned as tagged values.
  if (!representation().IsTagged()) {
    return false;
  }

  // HChange is the only instruction known to handle the hole itself.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return false;
  }

  return true;
}
2935
2936
2937bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
2938 return IsFastDoubleElementsKind(elements_kind()) &&
2939 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
2940}
2941
2942
2943bool HLoadKeyed::RequiresHoleCheck() const {
2944 if (IsFastPackedElementsKind(elements_kind())) {
2945 return false;
2946 }
2947
2948 if (IsFixedTypedArrayElementsKind(elements_kind())) {
2949 return false;
2950 }
2951
2952 if (hole_mode() == CONVERT_HOLE_TO_UNDEFINED) {
2953 return false;
2954 }
2955
2956 return !UsesMustHandleHole();
2957}
2958
2959
2960std::ostream& HLoadKeyedGeneric::PrintDataTo(
2961 std::ostream& os) const { // NOLINT
2962 return os << NameOf(object()) << "[" << NameOf(key()) << "]";
2963}
2964
2965
HValue* HLoadKeyedGeneric::Canonicalize() {
  // Recognize generic keyed loads that use property name generated
  // by for-in statement as a key and rewrite them into fast property load
  // by index.
  if (key()->IsLoadKeyed()) {
    HLoadKeyed* key_load = HLoadKeyed::cast(key());
    if (key_load->elements()->IsForInCacheArray()) {
      HForInCacheArray* names_cache =
          HForInCacheArray::cast(key_load->elements());

      if (names_cache->enumerable() == object()) {
        HForInCacheArray* index_cache =
            names_cache->index_cache();
        // Guard the rewrite: the receiver must still have the map the
        // for-in enum cache was built for.
        HCheckMapValue* map_check = HCheckMapValue::New(
            block()->graph()->isolate(), block()->graph()->zone(),
            block()->graph()->GetInvalidContext(), object(),
            names_cache->map());
        // Load the property index parallel to the name that was loaded.
        HInstruction* index = HLoadKeyed::New(
            block()->graph()->isolate(), block()->graph()->zone(),
            block()->graph()->GetInvalidContext(), index_cache, key_load->key(),
            key_load->key(), nullptr, key_load->elements_kind());
        map_check->InsertBefore(this);
        index->InsertBefore(this);
        // Replace this generic load with a direct load-field-by-index.
        return Prepend(new(block()->zone()) HLoadFieldByIndex(
            object(), index));
      }
    }
  }

  return this;
}
2997
2998
2999std::ostream& HStoreNamedGeneric::PrintDataTo(
3000 std::ostream& os) const { // NOLINT
3001 Handle<String> n = Handle<String>::cast(name());
3002 return os << NameOf(object()) << "." << n->ToCString().get() << " = "
3003 << NameOf(value());
3004}
3005
3006
3007std::ostream& HStoreNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
3008 os << NameOf(object()) << access_ << " = " << NameOf(value());
3009 if (NeedsWriteBarrier()) os << " (write-barrier)";
3010 if (has_transition()) os << " (transition map " << *transition_map() << ")";
3011 return os;
3012}
3013
3014
3015std::ostream& HStoreKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
3016 if (!is_fixed_typed_array()) {
3017 os << NameOf(elements());
3018 } else {
3019 DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
3020 elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
3021 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3022 }
3023
3024 os << "[" << NameOf(key());
3025 if (IsDehoisted()) os << " + " << base_offset();
3026 return os << "] = " << NameOf(value());
3027}
3028
3029
3030std::ostream& HStoreKeyedGeneric::PrintDataTo(
3031 std::ostream& os) const { // NOLINT
3032 return os << NameOf(object()) << "[" << NameOf(key())
3033 << "] = " << NameOf(value());
3034}
3035
3036
3037std::ostream& HTransitionElementsKind::PrintDataTo(
3038 std::ostream& os) const { // NOLINT
3039 os << NameOf(object());
3040 ElementsKind from_kind = original_map().handle()->elements_kind();
3041 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3042 os << " " << *original_map().handle() << " ["
3043 << ElementsAccessor::ForKind(from_kind)->name() << "] -> "
3044 << *transitioned_map().handle() << " ["
3045 << ElementsAccessor::ForKind(to_kind)->name() << "]";
3046 if (IsSimpleMapChangeTransition(from_kind, to_kind)) os << " (simple)";
3047 return os;
3048}
3049
3050
3051std::ostream& HLoadGlobalGeneric::PrintDataTo(
3052 std::ostream& os) const { // NOLINT
3053 return os << name()->ToCString().get() << " ";
3054}
3055
3056
3057std::ostream& HInnerAllocatedObject::PrintDataTo(
3058 std::ostream& os) const { // NOLINT
3059 os << NameOf(base_object()) << " offset ";
3060 return offset()->PrintTo(os);
3061}
3062
3063
3064std::ostream& HLoadContextSlot::PrintDataTo(std::ostream& os) const { // NOLINT
3065 return os << NameOf(value()) << "[" << slot_index() << "]";
3066}
3067
3068
3069std::ostream& HStoreContextSlot::PrintDataTo(
3070 std::ostream& os) const { // NOLINT
3071 return os << NameOf(context()) << "[" << slot_index()
3072 << "] = " << NameOf(value());
3073}
3074
3075
3076// Implementation of type inference and type conversions. Calculates
3077// the inferred type of this instruction based on the input operands.
3078
// Base implementation: by default an instruction's inferred type is simply
// the type stored on it; subclasses override to compute something sharper.
HType HValue::CalculateInferredType() {
  return type_;
}
3082
3083
3084HType HPhi::CalculateInferredType() {
3085 if (OperandCount() == 0) return HType::Tagged();
3086 HType result = OperandAt(0)->type();
3087 for (int i = 1; i < OperandCount(); ++i) {
3088 HType current = OperandAt(i)->type();
3089 result = result.Combine(current);
3090 }
3091 return result;
3092}
3093
3094
3095HType HChange::CalculateInferredType() {
3096 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
3097 return type();
3098}
3099
3100
3101Representation HUnaryMathOperation::RepresentationFromInputs() {
3102 if (SupportsFlexibleFloorAndRound() &&
3103 (op_ == kMathFloor || op_ == kMathRound)) {
3104 // Floor and Round always take a double input. The integral result can be
3105 // used as an integer or a double. Infer the representation from the uses.
3106 return Representation::None();
3107 }
3108 Representation rep = representation();
3109 // If any of the actual input representation is more general than what we
3110 // have so far but not Tagged, use that representation instead.
3111 Representation input_rep = value()->representation();
3112 if (!input_rep.IsTagged()) {
3113 rep = rep.generalize(input_rep);
3114 }
3115 return rep;
3116}
3117
3118
// Attempts allocation folding: grows the dominating allocation by this
// allocation's constant size and marks this instruction as a folded
// allocation, so the dominator reserves space for both. Returns true iff
// folding succeeded. Bails out (returning false) whenever folding is
// disabled, the dominator is not a suitable allocation, sizes are not
// constant, spaces differ, or the combined size is too large.
bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
                                          HValue* dominator) {
  DCHECK(side_effect == kNewSpacePromotion);
  DCHECK(!IsAllocationFolded());
  Zone* zone = block()->zone();
  Isolate* isolate = block()->isolate();
  if (!FLAG_use_allocation_folding) return false;

  // Try to fold allocations together with their dominating allocations.
  if (!dominator->IsAllocate()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s)\n",
             id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  // Check whether we are folding within the same block for local folding.
  if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
             id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  HAllocate* dominator_allocate = HAllocate::cast(dominator);
  HValue* dominator_size = dominator_allocate->size();
  HValue* current_size = size();

  // Both sizes must be compile-time int32 constants.
  // TODO(hpayer): Add support for non-constant allocation in dominator.
  if (!current_size->IsInteger32Constant() ||
      !dominator_size->IsInteger32Constant()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), "
             "dynamic allocation size in dominator\n",
             id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }


  // Both allocations must target the same space (see the DCHECK below).
  if (!IsFoldable(dominator_allocate)) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
             Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  DCHECK(
      (IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
      (IsOldSpaceAllocation() && dominator_allocate->IsOldSpaceAllocation()));

  // First update the size of the dominator allocate instruction.
  dominator_size = dominator_allocate->size();
  int32_t original_object_size =
      HConstant::cast(dominator_size)->GetInteger32Constant();
  int32_t dominator_size_constant = original_object_size;

  // Account for the alignment filler the dominator may need before this
  // object if double alignment is required.
  if (MustAllocateDoubleAligned()) {
    if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
      dominator_size_constant += kDoubleSize / 2;
    }
  }

  int32_t current_size_max_value = size()->GetInteger32Constant();
  int32_t new_dominator_size = dominator_size_constant + current_size_max_value;

  // Since we clear the first word after folded memory, we cannot use the
  // whole Page::kMaxRegularHeapObjectSize memory.
  if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
             id(), Mnemonic(), dominator_allocate->id(),
             dominator_allocate->Mnemonic(), new_dominator_size);
    }
    return false;
  }

  // Install the enlarged size constant right before the dominator.
  HInstruction* new_dominator_size_value = HConstant::CreateAndInsertBefore(
      isolate, zone, context(), new_dominator_size, Representation::None(),
      dominator_allocate);

  dominator_allocate->UpdateSize(new_dominator_size_value);

  // Propagate the double-alignment requirement to the dominator.
  if (MustAllocateDoubleAligned()) {
    if (!dominator_allocate->MustAllocateDoubleAligned()) {
      dominator_allocate->MakeDoubleAligned();
    }
  }

  // If this allocation was itself a folding dominator, its role is fully
  // absorbed by the new dominator and it can be deleted.
  if (IsAllocationFoldingDominator()) {
    DeleteAndReplaceWith(dominator_allocate);
    if (FLAG_trace_allocation_folding) {
      PrintF(
          "#%d (%s) folded dominator into #%d (%s), new dominator size: %d\n",
          id(), Mnemonic(), dominator_allocate->id(),
          dominator_allocate->Mnemonic(), new_dominator_size);
    }
    return true;
  }

  // First time the dominator participates in folding: materialize a
  // separate allocation (with the dominator's original size) that takes
  // over the dominator's uses, and turn the dominator into the folding
  // dominator proper.
  if (!dominator_allocate->IsAllocationFoldingDominator()) {
    HAllocate* first_alloc =
        HAllocate::New(isolate, zone, dominator_allocate->context(),
                       dominator_size, dominator_allocate->type(),
                       IsNewSpaceAllocation() ? NOT_TENURED : TENURED,
                       JS_OBJECT_TYPE, block()->graph()->GetConstant0());
    first_alloc->InsertAfter(dominator_allocate);
    dominator_allocate->ReplaceAllUsesWith(first_alloc);
    dominator_allocate->MakeAllocationFoldingDominator();
    first_alloc->MakeFoldedAllocation(dominator_allocate);
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) inserted for dominator #%d (%s)\n", first_alloc->id(),
             first_alloc->Mnemonic(), dominator_allocate->id(),
             dominator_allocate->Mnemonic());
    }
  }

  MakeFoldedAllocation(dominator_allocate);

  if (FLAG_trace_allocation_folding) {
    PrintF("#%d (%s) folded into #%d (%s), new dominator size: %d\n", id(),
           Mnemonic(), dominator_allocate->id(), dominator_allocate->Mnemonic(),
           new_dominator_size);
  }
  return true;
}
3248
3249
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003250std::ostream& HAllocate::PrintDataTo(std::ostream& os) const { // NOLINT
3251 os << NameOf(size()) << " (";
3252 if (IsNewSpaceAllocation()) os << "N";
3253 if (IsOldSpaceAllocation()) os << "P";
3254 if (MustAllocateDoubleAligned()) os << "A";
3255 if (MustPrefillWithFiller()) os << "F";
3256 return os << ")";
3257}
3258
3259
3260bool HStoreKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
3261 // The base offset is usually simply the size of the array header, except
3262 // with dehoisting adds an addition offset due to a array index key
3263 // manipulation, in which case it becomes (array header size +
3264 // constant-offset-from-key * kPointerSize)
3265 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset_;
3266 addition_result += increase_by_value;
3267 if (!addition_result.IsValid()) return false;
3268 base_offset_ = addition_result.ValueOrDie();
3269 return true;
3270}
3271
3272
3273bool HStoreKeyed::NeedsCanonicalization() {
3274 switch (value()->opcode()) {
3275 case kLoadKeyed: {
3276 ElementsKind load_kind = HLoadKeyed::cast(value())->elements_kind();
3277 return IsFixedFloatElementsKind(load_kind);
3278 }
3279 case kChange: {
3280 Representation from = HChange::cast(value())->from();
3281 return from.IsTagged() || from.IsHeapObject();
3282 }
3283 case kLoadNamedField:
3284 case kPhi: {
3285 // Better safe than sorry...
3286 return true;
3287 }
3288 default:
3289 return false;
3290 }
3291}
3292
3293
// Shorthands used by the constant-folding factories below: wrap a freshly
// computed value in a new HConstant of the given C++ type.
#define H_CONSTANT_INT(val) \
  HConstant::New(isolate, zone, context, static_cast<int32_t>(val))
#define H_CONSTANT_DOUBLE(val) \
  HConstant::New(isolate, zone, context, static_cast<double>(val))
3298
// Emits the factory HInstr::New for the simple binary arithmetic
// instructions (HAdd, HMul, HSub): when both operands are numeric
// constants the result is folded, preferring an int32 constant whenever
// the double result is exactly representable as one; otherwise a fresh
// instruction is allocated in the zone.
#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op)                      \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context,    \
                            HValue* left, HValue* right) {                    \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {   \
      HConstant* c_left = HConstant::cast(left);                              \
      HConstant* c_right = HConstant::cast(right);                            \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {          \
        double double_res = c_left->DoubleValue() op c_right->DoubleValue();  \
        if (IsInt32Double(double_res)) {                                      \
          return H_CONSTANT_INT(double_res);                                  \
        }                                                                     \
        return H_CONSTANT_DOUBLE(double_res);                                 \
      }                                                                       \
    }                                                                         \
    return new (zone) HInstr(context, left, right);                           \
  }

DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
3321
3322
3323HInstruction* HStringAdd::New(Isolate* isolate, Zone* zone, HValue* context,
3324 HValue* left, HValue* right,
3325 PretenureFlag pretenure_flag,
3326 StringAddFlags flags,
3327 Handle<AllocationSite> allocation_site) {
3328 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3329 HConstant* c_right = HConstant::cast(right);
3330 HConstant* c_left = HConstant::cast(left);
3331 if (c_left->HasStringValue() && c_right->HasStringValue()) {
3332 Handle<String> left_string = c_left->StringValue();
3333 Handle<String> right_string = c_right->StringValue();
3334 // Prevent possible exception by invalid string length.
3335 if (left_string->length() + right_string->length() < String::kMaxLength) {
3336 MaybeHandle<String> concat = isolate->factory()->NewConsString(
3337 c_left->StringValue(), c_right->StringValue());
3338 return HConstant::New(isolate, zone, context, concat.ToHandleChecked());
3339 }
3340 }
3341 }
3342 return new (zone)
3343 HStringAdd(context, left, right, pretenure_flag, flags, allocation_site);
3344}
3345
3346
3347std::ostream& HStringAdd::PrintDataTo(std::ostream& os) const { // NOLINT
3348 if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
3349 os << "_CheckBoth";
3350 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
3351 os << "_CheckLeft";
3352 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
3353 os << "_CheckRight";
3354 }
3355 HBinaryOperation::PrintDataTo(os);
3356 os << " (";
3357 if (pretenure_flag() == NOT_TENURED)
3358 os << "N";
3359 else if (pretenure_flag() == TENURED)
3360 os << "D";
3361 return os << ")";
3362}
3363
3364
3365HInstruction* HStringCharFromCode::New(Isolate* isolate, Zone* zone,
3366 HValue* context, HValue* char_code) {
3367 if (FLAG_fold_constants && char_code->IsConstant()) {
3368 HConstant* c_code = HConstant::cast(char_code);
3369 if (c_code->HasNumberValue()) {
3370 if (std::isfinite(c_code->DoubleValue())) {
3371 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
3372 return HConstant::New(
3373 isolate, zone, context,
3374 isolate->factory()->LookupSingleCharacterStringFromCode(code));
3375 }
3376 return HConstant::New(isolate, zone, context,
3377 isolate->factory()->empty_string());
3378 }
3379 }
3380 return new(zone) HStringCharFromCode(context, char_code);
3381}
3382
3383
// Factory for unary math builtins. Constant-folds the operation when the
// operand is a numeric constant; otherwise creates a fresh instruction.
HInstruction* HUnaryMathOperation::New(Isolate* isolate, Zone* zone,
                                       HValue* context, HValue* value,
                                       BuiltinFunctionId op) {
  // do { ... } while (false) lets the bail-out checks `break` straight to
  // the instruction-creation fallthrough at the bottom.
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    if (std::isnan(d)) {  // NaN poisons everything.
      return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
    }
    if (std::isinf(d)) {  // +Infinity and -Infinity.
      switch (op) {
        case kMathExp:
          // exp(+inf) = +inf, exp(-inf) = 0.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
        case kMathLog:
        case kMathSqrt:
          // log/sqrt of -inf is NaN; of +inf is +inf.
          return H_CONSTANT_DOUBLE(
              (d > 0.0) ? d : std::numeric_limits<double>::quiet_NaN());
        case kMathPowHalf:
        case kMathAbs:
          // Both map -inf to +inf and leave +inf alone.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
        case kMathRound:
        case kMathFround:
        case kMathFloor:
          // Infinities are already "integral".
          return H_CONSTANT_DOUBLE(d);
        case kMathClz32:
          // ToUint32(inf) == 0, which has 32 leading zeros.
          return H_CONSTANT_INT(32);
        default:
          UNREACHABLE();
          break;
      }
    }
    // Finite, non-NaN input: evaluate the operation at compile time.
    switch (op) {
      case kMathExp:
        lazily_initialize_fast_exp(isolate);
        return H_CONSTANT_DOUBLE(fast_exp(d, isolate));
      case kMathLog:
        return H_CONSTANT_DOUBLE(std::log(d));
      case kMathSqrt:
        lazily_initialize_fast_sqrt(isolate);
        return H_CONSTANT_DOUBLE(fast_sqrt(d, isolate));
      case kMathPowHalf:
        return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
      case kMathAbs:
        // Adding 0.0 turns -0 into +0.
        return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
      case kMathRound:
        // -0.5 .. -0.0 round to -0.0.
        if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significant * 2 ^ Exponent. If the
        // Exponent is not negative, the double value is already an integer.
        if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
        return H_CONSTANT_DOUBLE(Floor(d + 0.5));
      case kMathFround:
        // Round-trip through float32.
        return H_CONSTANT_DOUBLE(static_cast<double>(static_cast<float>(d)));
      case kMathFloor:
        return H_CONSTANT_DOUBLE(Floor(d));
      case kMathClz32: {
        uint32_t i = DoubleToUint32(d);
        return H_CONSTANT_INT(base::bits::CountLeadingZeros32(i));
      }
      default:
        UNREACHABLE();
        break;
    }
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}
3453
3454
// For kMathFloor/kMathRound, chooses between an int32 and a double output
// representation by scanning the uses; any other operation defers to the
// generic HValue logic.
Representation HUnaryMathOperation::RepresentationFromUses() {
  if (op_ != kMathFloor && op_ != kMathRound) {
    return HValue::RepresentationFromUses();
  }

  // The instruction can have an int32 or double output. Prefer a double
  // representation if there are double uses.
  bool use_double = false;

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    int use_index = it.index();
    Representation rep_observed = use->observed_input_representation(use_index);
    Representation rep_required = use->RequiredInputRepresentation(use_index);
    use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
    if (use_double && !FLAG_trace_representation) {
      // Having seen one double is enough.
      break;
    }
    // With tracing enabled the loop deliberately keeps going so that every
    // use gets logged.
    if (FLAG_trace_representation) {
      if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
        PrintF("#%d %s is used by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_observed.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      } else {
        PrintF("#%d %s is required by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_required.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      }
    }
  }
  return use_double ? Representation::Double() : Representation::Integer32();
}
3490
3491
3492HInstruction* HPower::New(Isolate* isolate, Zone* zone, HValue* context,
3493 HValue* left, HValue* right) {
3494 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3495 HConstant* c_left = HConstant::cast(left);
3496 HConstant* c_right = HConstant::cast(right);
3497 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3498 double result =
3499 power_helper(isolate, c_left->DoubleValue(), c_right->DoubleValue());
3500 return H_CONSTANT_DOUBLE(std::isnan(result)
3501 ? std::numeric_limits<double>::quiet_NaN()
3502 : result);
3503 }
3504 }
3505 return new(zone) HPower(left, right);
3506}
3507
3508
3509HInstruction* HMathMinMax::New(Isolate* isolate, Zone* zone, HValue* context,
3510 HValue* left, HValue* right, Operation op) {
3511 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3512 HConstant* c_left = HConstant::cast(left);
3513 HConstant* c_right = HConstant::cast(right);
3514 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3515 double d_left = c_left->DoubleValue();
3516 double d_right = c_right->DoubleValue();
3517 if (op == kMathMin) {
3518 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
3519 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
3520 if (d_left == d_right) {
3521 // Handle +0 and -0.
3522 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
3523 : d_right);
3524 }
3525 } else {
3526 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
3527 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
3528 if (d_left == d_right) {
3529 // Handle +0 and -0.
3530 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
3531 : d_left);
3532 }
3533 }
3534 // All comparisons failed, must be NaN.
3535 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
3536 }
3537 }
3538 return new(zone) HMathMinMax(context, left, right, op);
3539}
3540
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003541HInstruction* HMod::New(Isolate* isolate, Zone* zone, HValue* context,
Ben Murdoch097c5b22016-05-18 11:27:45 +01003542 HValue* left, HValue* right) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003543 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3544 HConstant* c_left = HConstant::cast(left);
3545 HConstant* c_right = HConstant::cast(right);
3546 if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
3547 int32_t dividend = c_left->Integer32Value();
3548 int32_t divisor = c_right->Integer32Value();
3549 if (dividend == kMinInt && divisor == -1) {
3550 return H_CONSTANT_DOUBLE(-0.0);
3551 }
3552 if (divisor != 0) {
3553 int32_t res = dividend % divisor;
3554 if ((res == 0) && (dividend < 0)) {
3555 return H_CONSTANT_DOUBLE(-0.0);
3556 }
3557 return H_CONSTANT_INT(res);
3558 }
3559 }
3560 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003561 return new (zone) HMod(context, left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003562}
3563
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003564HInstruction* HDiv::New(Isolate* isolate, Zone* zone, HValue* context,
Ben Murdoch097c5b22016-05-18 11:27:45 +01003565 HValue* left, HValue* right) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003566 // If left and right are constant values, try to return a constant value.
3567 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3568 HConstant* c_left = HConstant::cast(left);
3569 HConstant* c_right = HConstant::cast(right);
3570 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3571 if (c_right->DoubleValue() != 0) {
3572 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
3573 if (IsInt32Double(double_res)) {
3574 return H_CONSTANT_INT(double_res);
3575 }
3576 return H_CONSTANT_DOUBLE(double_res);
3577 } else {
3578 int sign = Double(c_left->DoubleValue()).Sign() *
3579 Double(c_right->DoubleValue()).Sign(); // Right could be -0.
3580 return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
3581 }
3582 }
3583 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003584 return new (zone) HDiv(context, left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003585}
3586
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003587HInstruction* HBitwise::New(Isolate* isolate, Zone* zone, HValue* context,
Ben Murdoch097c5b22016-05-18 11:27:45 +01003588 Token::Value op, HValue* left, HValue* right) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003589 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3590 HConstant* c_left = HConstant::cast(left);
3591 HConstant* c_right = HConstant::cast(right);
3592 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3593 int32_t result;
3594 int32_t v_left = c_left->NumberValueAsInteger32();
3595 int32_t v_right = c_right->NumberValueAsInteger32();
3596 switch (op) {
3597 case Token::BIT_XOR:
3598 result = v_left ^ v_right;
3599 break;
3600 case Token::BIT_AND:
3601 result = v_left & v_right;
3602 break;
3603 case Token::BIT_OR:
3604 result = v_left | v_right;
3605 break;
3606 default:
3607 result = 0; // Please the compiler.
3608 UNREACHABLE();
3609 }
3610 return H_CONSTANT_INT(result);
3611 }
3612 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003613 return new (zone) HBitwise(context, op, left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003614}
3615
// Emits the factory HInstr::New for shift instructions whose folded value
// is computable from the int32 truncations of the constant operands; the
// `result` expression may reference c_left and c_right.
#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result)                          \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context,  \
                            HValue* left, HValue* right) {                  \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
      HConstant* c_left = HConstant::cast(left);                            \
      HConstant* c_right = HConstant::cast(right);                          \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {        \
        return H_CONSTANT_INT(result);                                      \
      }                                                                     \
    }                                                                       \
    return new (zone) HInstr(context, left, right);                         \
  }

// Shift counts are masked to 5 bits, matching the JS shift semantics.
DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR
3635
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003636HInstruction* HShr::New(Isolate* isolate, Zone* zone, HValue* context,
Ben Murdoch097c5b22016-05-18 11:27:45 +01003637 HValue* left, HValue* right) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003638 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3639 HConstant* c_left = HConstant::cast(left);
3640 HConstant* c_right = HConstant::cast(right);
3641 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3642 int32_t left_val = c_left->NumberValueAsInteger32();
3643 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
3644 if ((right_val == 0) && (left_val < 0)) {
3645 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
3646 }
3647 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
3648 }
3649 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003650 return new (zone) HShr(context, left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003651}
3652
3653
3654HInstruction* HSeqStringGetChar::New(Isolate* isolate, Zone* zone,
3655 HValue* context, String::Encoding encoding,
3656 HValue* string, HValue* index) {
3657 if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
3658 HConstant* c_string = HConstant::cast(string);
3659 HConstant* c_index = HConstant::cast(index);
3660 if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
3661 Handle<String> s = c_string->StringValue();
3662 int32_t i = c_index->Integer32Value();
3663 DCHECK_LE(0, i);
3664 DCHECK_LT(i, s->length());
3665 return H_CONSTANT_INT(s->Get(i));
3666 }
3667 }
3668 return new(zone) HSeqStringGetChar(encoding, string, index);
3669}
3670
3671
// The folding shorthands are local to the factories above.
#undef H_CONSTANT_INT
#undef H_CONSTANT_DOUBLE
3674
3675
3676std::ostream& HBitwise::PrintDataTo(std::ostream& os) const { // NOLINT
3677 os << Token::Name(op_) << " ";
3678 return HBitwiseBinaryOperation::PrintDataTo(os);
3679}
3680
3681
void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  // Only simplify when every operand is a constant.
  for (int i = 0; i < OperandCount(); ++i) {
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      // Already an int32 constant; nothing to do.
      continue;
    } else if (operand->HasDoubleValue()) {
      // Replace the double constant with its int32 truncation, inserted
      // right after the original constant.
      HConstant* integer_input = HConstant::New(
          graph->isolate(), graph->zone(), graph->GetInvalidContext(),
          DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      // Booleans truncate to 1/0.
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      // Remaining immortal-immovable constants truncate to 0.
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}
3716
3717
3718void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
3719 DCHECK(CheckFlag(kFlexibleRepresentation));
3720 Representation new_rep = RepresentationFromUses();
3721 UpdateRepresentation(new_rep, h_infer, "uses");
3722 new_rep = RepresentationFromInputs();
3723 UpdateRepresentation(new_rep, h_infer, "inputs");
3724 new_rep = RepresentationFromUseRequirements();
3725 UpdateRepresentation(new_rep, h_infer, "use requirements");
3726}
3727
3728
3729Representation HPhi::RepresentationFromInputs() {
3730 Representation r = representation();
3731 for (int i = 0; i < OperandCount(); ++i) {
3732 // Ignore conservative Tagged assumption of parameters if we have
3733 // reason to believe that it's too conservative.
3734 if (has_type_feedback_from_uses() && OperandAt(i)->IsParameter()) {
3735 continue;
3736 }
3737
3738 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
3739 }
3740 return r;
3741}
3742
3743
3744// Returns a representation if all uses agree on the same representation.
3745// Integer32 is also returned when some uses are Smi but others are Integer32.
3746Representation HValue::RepresentationFromUseRequirements() {
3747 Representation rep = Representation::None();
3748 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3749 // Ignore the use requirement from never run code
3750 if (it.value()->block()->IsUnreachable()) continue;
3751
3752 // We check for observed_input_representation elsewhere.
3753 Representation use_rep =
3754 it.value()->RequiredInputRepresentation(it.index());
3755 if (rep.IsNone()) {
3756 rep = use_rep;
3757 continue;
3758 }
3759 if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
3760 if (rep.generalize(use_rep).IsInteger32()) {
3761 rep = Representation::Integer32();
3762 continue;
3763 }
3764 return Representation::None();
3765 }
3766 return rep;
3767}
3768
3769
3770bool HValue::HasNonSmiUse() {
3771 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3772 // We check for observed_input_representation elsewhere.
3773 Representation use_rep =
3774 it.value()->RequiredInputRepresentation(it.index());
3775 if (!use_rep.IsNone() &&
3776 !use_rep.IsSmi() &&
3777 !use_rep.IsTagged()) {
3778 return true;
3779 }
3780 }
3781 return false;
3782}
3783
3784
3785// Node-specific verification code is only included in debug mode.
3786#ifdef DEBUG
3787
3788void HPhi::Verify() {
3789 DCHECK(OperandCount() == block()->predecessors()->length());
3790 for (int i = 0; i < OperandCount(); ++i) {
3791 HValue* value = OperandAt(i);
3792 HBasicBlock* defining_block = value->block();
3793 HBasicBlock* predecessor_block = block()->predecessors()->at(i);
3794 DCHECK(defining_block == predecessor_block ||
3795 defining_block->Dominates(predecessor_block));
3796 }
3797}
3798
3799
void HSimulate::Verify() {
  HInstruction::Verify();
  // A simulate must carry an AST id unless the next instruction is an
  // EnterInlined.
  DCHECK(HasAstId() || next()->IsEnterInlined());
}
3804
3805
void HCheckHeapObject::Verify() {
  HInstruction::Verify();
  // The check is for effect only; nothing consumes its result.
  DCHECK(HasNoUses());
}
3810
3811
void HCheckValue::Verify() {
  HInstruction::Verify();
  // The check is for effect only; nothing consumes its result.
  DCHECK(HasNoUses());
}
3816
3817#endif
3818
3819
3820HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
3821 DCHECK(offset >= 0);
3822 DCHECK(offset < FixedArray::kHeaderSize);
3823 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
3824 return HObjectAccess(kInobject, offset);
3825}
3826
3827
3828HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
3829 Representation representation) {
3830 DCHECK(offset >= 0);
3831 Portion portion = kInobject;
3832
3833 if (offset == JSObject::kElementsOffset) {
3834 portion = kElementsPointer;
3835 } else if (offset == JSObject::kMapOffset) {
3836 portion = kMaps;
3837 }
3838 bool existing_inobject_property = true;
3839 if (!map.is_null()) {
3840 existing_inobject_property = (offset <
3841 map->instance_size() - map->unused_property_fields() * kPointerSize);
3842 }
3843 return HObjectAccess(portion, offset, representation, Handle<String>::null(),
3844 false, existing_inobject_property);
3845}
3846
3847
3848HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
3849 switch (offset) {
3850 case AllocationSite::kTransitionInfoOffset:
3851 return HObjectAccess(kInobject, offset, Representation::Tagged());
3852 case AllocationSite::kNestedSiteOffset:
3853 return HObjectAccess(kInobject, offset, Representation::Tagged());
3854 case AllocationSite::kPretenureDataOffset:
3855 return HObjectAccess(kInobject, offset, Representation::Smi());
3856 case AllocationSite::kPretenureCreateCountOffset:
3857 return HObjectAccess(kInobject, offset, Representation::Smi());
3858 case AllocationSite::kDependentCodeOffset:
3859 return HObjectAccess(kInobject, offset, Representation::Tagged());
3860 case AllocationSite::kWeakNextOffset:
3861 return HObjectAccess(kInobject, offset, Representation::Tagged());
3862 default:
3863 UNREACHABLE();
3864 }
3865 return HObjectAccess(kInobject, offset);
3866}
3867
3868
3869HObjectAccess HObjectAccess::ForContextSlot(int index) {
3870 DCHECK(index >= 0);
3871 Portion portion = kInobject;
3872 int offset = Context::kHeaderSize + index * kPointerSize;
3873 DCHECK_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
3874 return HObjectAccess(portion, offset, Representation::Tagged());
3875}
3876
3877
3878HObjectAccess HObjectAccess::ForScriptContext(int index) {
3879 DCHECK(index >= 0);
3880 Portion portion = kInobject;
3881 int offset = ScriptContextTable::GetContextOffset(index);
3882 return HObjectAccess(portion, offset, Representation::Tagged());
3883}
3884
3885
3886HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
3887 DCHECK(offset >= 0);
3888 Portion portion = kInobject;
3889
3890 if (offset == JSObject::kElementsOffset) {
3891 portion = kElementsPointer;
3892 } else if (offset == JSArray::kLengthOffset) {
3893 portion = kArrayLengths;
3894 } else if (offset == JSObject::kMapOffset) {
3895 portion = kMaps;
3896 }
3897 return HObjectAccess(portion, offset);
3898}
3899
3900
// Describes an access at |offset| into an out-of-object (backing store)
// properties array; such fields carry no name and are never in-object.
HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
    Representation representation) {
  DCHECK(offset >= 0);
  return HObjectAccess(kBackingStore, offset, representation,
                       Handle<String>::null(), false, false);
}
3907
3908
3909HObjectAccess HObjectAccess::ForField(Handle<Map> map, int index,
3910 Representation representation,
3911 Handle<Name> name) {
3912 if (index < 0) {
3913 // Negative property indices are in-object properties, indexed
3914 // from the end of the fixed part of the object.
3915 int offset = (index * kPointerSize) + map->instance_size();
3916 return HObjectAccess(kInobject, offset, representation, name, false, true);
3917 } else {
3918 // Non-negative property indices are in the properties array.
3919 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
3920 return HObjectAccess(kBackingStore, offset, representation, name,
3921 false, false);
3922 }
3923}
3924
3925
3926void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
3927 // set the appropriate GVN flags for a given load or store instruction
3928 if (access_type == STORE) {
3929 // track dominating allocations in order to eliminate write barriers
3930 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
3931 instr->SetFlag(HValue::kTrackSideEffectDominators);
3932 } else {
3933 // try to GVN loads, but don't hoist above map changes
3934 instr->SetFlag(HValue::kUseGVN);
3935 instr->SetDependsOnFlag(::v8::internal::kMaps);
3936 }
3937
3938 switch (portion()) {
3939 case kArrayLengths:
3940 if (access_type == STORE) {
3941 instr->SetChangesFlag(::v8::internal::kArrayLengths);
3942 } else {
3943 instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
3944 }
3945 break;
3946 case kStringLengths:
3947 if (access_type == STORE) {
3948 instr->SetChangesFlag(::v8::internal::kStringLengths);
3949 } else {
3950 instr->SetDependsOnFlag(::v8::internal::kStringLengths);
3951 }
3952 break;
3953 case kInobject:
3954 if (access_type == STORE) {
3955 instr->SetChangesFlag(::v8::internal::kInobjectFields);
3956 } else {
3957 instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
3958 }
3959 break;
3960 case kDouble:
3961 if (access_type == STORE) {
3962 instr->SetChangesFlag(::v8::internal::kDoubleFields);
3963 } else {
3964 instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
3965 }
3966 break;
3967 case kBackingStore:
3968 if (access_type == STORE) {
3969 instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
3970 } else {
3971 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
3972 }
3973 break;
3974 case kElementsPointer:
3975 if (access_type == STORE) {
3976 instr->SetChangesFlag(::v8::internal::kElementsPointer);
3977 } else {
3978 instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
3979 }
3980 break;
3981 case kMaps:
3982 if (access_type == STORE) {
3983 instr->SetChangesFlag(::v8::internal::kMaps);
3984 } else {
3985 instr->SetDependsOnFlag(::v8::internal::kMaps);
3986 }
3987 break;
3988 case kExternalMemory:
3989 if (access_type == STORE) {
3990 instr->SetChangesFlag(::v8::internal::kExternalMemory);
3991 } else {
3992 instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
3993 }
3994 break;
3995 }
3996}
3997
3998
3999std::ostream& operator<<(std::ostream& os, const HObjectAccess& access) {
4000 os << ".";
4001
4002 switch (access.portion()) {
4003 case HObjectAccess::kArrayLengths:
4004 case HObjectAccess::kStringLengths:
4005 os << "%length";
4006 break;
4007 case HObjectAccess::kElementsPointer:
4008 os << "%elements";
4009 break;
4010 case HObjectAccess::kMaps:
4011 os << "%map";
4012 break;
4013 case HObjectAccess::kDouble: // fall through
4014 case HObjectAccess::kInobject:
4015 if (!access.name().is_null() && access.name()->IsString()) {
4016 os << Handle<String>::cast(access.name())->ToCString().get();
4017 }
4018 os << "[in-object]";
4019 break;
4020 case HObjectAccess::kBackingStore:
4021 if (!access.name().is_null() && access.name()->IsString()) {
4022 os << Handle<String>::cast(access.name())->ToCString().get();
4023 }
4024 os << "[backing-store]";
4025 break;
4026 case HObjectAccess::kExternalMemory:
4027 os << "[external-memory]";
4028 break;
4029 }
4030
4031 return os << "@" << access.offset();
4032}
4033
4034} // namespace internal
4035} // namespace v8