blob: 6036d3fb68c6e99f250c8a1c22da9fe9aeefa79b [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/crankshaft/hydrogen-instructions.h"
6
7#include "src/base/bits.h"
Ben Murdoch61f157c2016-09-16 13:49:30 +01008#include "src/base/ieee754.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009#include "src/base/safe_math.h"
10#include "src/crankshaft/hydrogen-infer-representation.h"
11#include "src/double.h"
12#include "src/elements.h"
13#include "src/factory.h"
14
15#if V8_TARGET_ARCH_IA32
16#include "src/crankshaft/ia32/lithium-ia32.h" // NOLINT
17#elif V8_TARGET_ARCH_X64
18#include "src/crankshaft/x64/lithium-x64.h" // NOLINT
19#elif V8_TARGET_ARCH_ARM64
20#include "src/crankshaft/arm64/lithium-arm64.h" // NOLINT
21#elif V8_TARGET_ARCH_ARM
22#include "src/crankshaft/arm/lithium-arm.h" // NOLINT
23#elif V8_TARGET_ARCH_PPC
24#include "src/crankshaft/ppc/lithium-ppc.h" // NOLINT
25#elif V8_TARGET_ARCH_MIPS
26#include "src/crankshaft/mips/lithium-mips.h" // NOLINT
27#elif V8_TARGET_ARCH_MIPS64
28#include "src/crankshaft/mips64/lithium-mips64.h" // NOLINT
Ben Murdochda12d292016-06-02 14:46:10 +010029#elif V8_TARGET_ARCH_S390
30#include "src/crankshaft/s390/lithium-s390.h" // NOLINT
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000031#elif V8_TARGET_ARCH_X87
32#include "src/crankshaft/x87/lithium-x87.h" // NOLINT
33#else
34#error Unsupported target architecture.
35#endif
36
37namespace v8 {
38namespace internal {
39
// Emit one CompileToLithium stub for every concrete hydrogen instruction;
// each simply dispatches to the matching LChunkBuilder::Do<Type> method.
#define DEFINE_COMPILE(type)                                         \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) {  \
    return builder->Do##type(this);                                  \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE
46
47
// Returns the isolate owning this value, reached through its basic block.
Isolate* HValue::isolate() const {
  DCHECK(block() != NULL);
  return block()->isolate();
}
52
53
// Pins this value to representation |r| if it is still flexible; clearing
// kFlexibleRepresentation prevents later inference passes from changing it.
void HValue::AssumeRepresentation(Representation r) {
  if (CheckFlag(kFlexibleRepresentation)) {
    ChangeRepresentation(r);
    // The representation of the value is dictated by type feedback and
    // will not be changed later.
    ClearFlag(kFlexibleRepresentation);
  }
}
62
63
// One step of the representation-inference fixpoint: generalize from the
// operands, then from the uses, then widen Smi to Integer32 when some use
// cannot accept a Smi.
void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }
}
75
76
// Computes the most general representation demanded by this value's uses.
// Emits a trace line per use under --trace_representation.
Representation HValue::RepresentationFromUses() {
  if (HasNoUses()) return Representation::None();
  Representation result = Representation::None();

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    Representation rep = use->observed_input_representation(it.index());
    result = result.generalize(rep);

    if (FLAG_trace_representation) {
      PrintF("#%d %s is used by #%d %s as %s%s\n",
             id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
             (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
    }
  }
  if (IsPhi()) {
    // Phis also fold in representations observed through other phis.
    result = result.generalize(
        HPhi::cast(this)->representation_from_indirect_uses());
  }

  // External representations are dealt with separately.
  return result.IsExternal() ? Representation::None() : result;
}
100
101
// Widens this value's representation to |new_rep| if it is strictly more
// general, then re-queues dependents so the inference fixpoint can react.
// Values flagged kCannotBeTagged refuse a widening to Tagged.
void HValue::UpdateRepresentation(Representation new_rep,
                                  HInferRepresentationPhase* h_infer,
                                  const char* reason) {
  Representation r = representation();
  if (new_rep.is_more_general_than(r)) {
    if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
    if (FLAG_trace_representation) {
      PrintF("Changing #%d %s representation %s -> %s based on %s\n",
             id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
    }
    ChangeRepresentation(new_rep);
    AddDependantsToWorklist(h_infer);
  }
}
116
117
// Re-queues everything whose representation may depend on this value:
// all uses (consumers) and all operands (producers).
void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    h_infer->AddToWorklist(it.value());
  }
  for (int i = 0; i < OperandCount(); ++i) {
    h_infer->AddToWorklist(OperandAt(i));
  }
}
126
127
128static int32_t ConvertAndSetOverflow(Representation r,
129 int64_t result,
130 bool* overflow) {
131 if (r.IsSmi()) {
132 if (result > Smi::kMaxValue) {
133 *overflow = true;
134 return Smi::kMaxValue;
135 }
136 if (result < Smi::kMinValue) {
137 *overflow = true;
138 return Smi::kMinValue;
139 }
140 } else {
141 if (result > kMaxInt) {
142 *overflow = true;
143 return kMaxInt;
144 }
145 if (result < kMinInt) {
146 *overflow = true;
147 return kMinInt;
148 }
149 }
150 return static_cast<int32_t>(result);
151}
152
153
154static int32_t AddWithoutOverflow(Representation r,
155 int32_t a,
156 int32_t b,
157 bool* overflow) {
158 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
159 return ConvertAndSetOverflow(r, result, overflow);
160}
161
162
163static int32_t SubWithoutOverflow(Representation r,
164 int32_t a,
165 int32_t b,
166 bool* overflow) {
167 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
168 return ConvertAndSetOverflow(r, result, overflow);
169}
170
171
172static int32_t MulWithoutOverflow(const Representation& r,
173 int32_t a,
174 int32_t b,
175 bool* overflow) {
176 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
177 return ConvertAndSetOverflow(r, result, overflow);
178}
179
180
181int32_t Range::Mask() const {
182 if (lower_ == upper_) return lower_;
183 if (lower_ >= 0) {
184 int32_t res = 1;
185 while (res < upper_) {
186 res = (res << 1) | 1;
187 }
188 return res;
189 }
190 return 0xffffffff;
191}
192
193
// Shifts both range bounds by |value|, saturating at the int32 limits.
void Range::AddConstant(int32_t value) {
  if (value == 0) return;
  bool may_overflow = false;  // Overflow is ignored here.
  Representation r = Representation::Integer32();
  lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
#ifdef DEBUG
  Verify();
#endif
}
204
205
206void Range::Intersect(Range* other) {
207 upper_ = Min(upper_, other->upper_);
208 lower_ = Max(lower_, other->lower_);
209 bool b = CanBeMinusZero() && other->CanBeMinusZero();
210 set_can_be_minus_zero(b);
211}
212
213
214void Range::Union(Range* other) {
215 upper_ = Max(upper_, other->upper_);
216 lower_ = Min(lower_, other->lower_);
217 bool b = CanBeMinusZero() || other->CanBeMinusZero();
218 set_can_be_minus_zero(b);
219}
220
221
222void Range::CombinedMax(Range* other) {
223 upper_ = Max(upper_, other->upper_);
224 lower_ = Max(lower_, other->lower_);
225 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
226}
227
228
229void Range::CombinedMin(Range* other) {
230 upper_ = Min(upper_, other->upper_);
231 lower_ = Min(lower_, other->lower_);
232 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
233}
234
235
236void Range::Sar(int32_t value) {
237 int32_t bits = value & 0x1F;
238 lower_ = lower_ >> bits;
239 upper_ = upper_ >> bits;
240 set_can_be_minus_zero(false);
241}
242
243
// Left shift of both bounds (low five bits of the count only).  If shifting
// back does not recover the original bounds, bits were lost and the range
// widens to the full int32 range.
void Range::Shl(int32_t value) {
  int32_t bits = value & 0x1F;
  int old_lower = lower_;
  int old_upper = upper_;
  lower_ = lower_ << bits;
  upper_ = upper_ << bits;
  if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
    upper_ = kMaxInt;
    lower_ = kMinInt;
  }
  set_can_be_minus_zero(false);
}
256
257
// Adds |other| to this range bound-wise, saturating to |r|.  Returns true
// if either bound may overflow.  KeepOrder restores lower_ <= upper_ after
// saturation may have crossed the bounds.
bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}
268
269
// Subtracts |other| from this range: new lower is lower - other's upper,
// new upper is upper - other's lower.  Returns true if either bound may
// overflow after saturation to |r|.
bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}
280
281
282void Range::KeepOrder() {
283 if (lower_ > upper_) {
284 int32_t tmp = lower_;
285 lower_ = upper_;
286 upper_ = tmp;
287 }
288}
289
290
#ifdef DEBUG
// Debug-only check of the range invariant.
void Range::Verify() const {
  DCHECK(lower_ <= upper_);
}
#endif
296
297
// Multiplies this range by |other|.  Because either operand may be negative,
// the new bounds are the min/max over all four corner products.  Returns
// true if any product may overflow after saturation to |r|.
bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  lower_ = Min(Min(v1, v2), Min(v3, v4));
  upper_ = Max(Max(v1, v2), Max(v3, v4));
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}
311
312
313bool HValue::IsDefinedAfter(HBasicBlock* other) const {
314 return block()->block_id() > other->block_id();
315}
316
317
// Returns the next live node, lazily unlinking nodes whose value has been
// killed (see HValue::Kill); dead entries are pruned here on traversal.
HUseListNode* HUseListNode::tail() {
  // Skip and remove dead items in the use list.
  while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
    tail_ = tail_->tail_;
  }
  return tail_;
}
325
326
327bool HValue::CheckUsesForFlag(Flag f) const {
328 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
329 if (it.value()->IsSimulate()) continue;
330 if (!it.value()->CheckFlag(f)) return false;
331 }
332 return true;
333}
334
335
336bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
337 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
338 if (it.value()->IsSimulate()) continue;
339 if (!it.value()->CheckFlag(f)) {
340 *value = it.value();
341 return false;
342 }
343 }
344 return true;
345}
346
347
// True iff there is at least one non-simulate use and every non-simulate
// use has flag |f|.  The first flag-less use short-circuits to false.
bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
  bool return_value = false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
    return_value = true;  // Saw at least one qualifying use.
  }
  return return_value;
}
357
358
// Positions the iterator on the first (live) use by advancing once.
HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
  Advance();
}
362
363
// Steps to the next use, caching value/index; tail() skips dead entries.
void HUseIterator::Advance() {
  current_ = next_;
  if (current_ != NULL) {
    next_ = current_->tail();
    value_ = current_->value();
    index_ = current_->index();
  }
}
372
373
374int HValue::UseCount() const {
375 int count = 0;
376 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
377 return count;
378}
379
380
// Unlinks and returns the use-list node recording that |value| uses this
// value as operand |index|, or NULL if no such node exists.  The returned
// node may be recycled by the caller (see RegisterUse).
HUseListNode* HValue::RemoveUse(HValue* value, int index) {
  HUseListNode* previous = NULL;
  HUseListNode* current = use_list_;
  while (current != NULL) {
    if (current->value() == value && current->index() == index) {
      if (previous == NULL) {
        use_list_ = current->tail();
      } else {
        previous->set_tail(current->tail());
      }
      break;
    }

    previous = current;
    current = current->tail();
  }

#ifdef DEBUG
  // Do not reuse use list nodes in debug mode, zap them.
  if (current != NULL) {
    HUseListNode* temp =
        new(block()->zone())
        HUseListNode(current->value(), current->index(), NULL);
    current->Zap();
    current = temp;
  }
#endif
  return current;
}
410
411
// Structural equality used by GVN: same opcode, representation, type,
// flags, and operand ids, plus instruction-specific DataEquals.  Equal
// values must hash equally (checked below).
bool HValue::Equals(HValue* other) {
  if (other->opcode() != opcode()) return false;
  if (!other->representation().Equals(representation())) return false;
  if (!other->type_.Equals(type_)) return false;
  if (other->flags() != flags()) return false;
  if (OperandCount() != other->OperandCount()) return false;
  for (int i = 0; i < OperandCount(); ++i) {
    if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
  }
  bool result = DataEquals(other);
  DCHECK(!result || Hashcode() == other->Hashcode());
  return result;
}
425
426
427intptr_t HValue::Hashcode() {
428 intptr_t result = opcode();
429 int count = OperandCount();
430 for (int i = 0; i < count; ++i) {
431 result = result * 19 + OperandAt(i)->id() + (result >> 7);
432 }
433 return result;
434}
435
436
// Human-readable opcode name, generated from the instruction list macro.
const char* HValue::Mnemonic() const {
  switch (opcode()) {
#define MAKE_CASE(type) case k##type: return #type;
    HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
#undef MAKE_CASE
    case kPhi: return "Phi";
    default: return "";
  }
}
446
447
// A value in an unreachable block can be replaced by dummy uses during
// unreachable-code elimination, unless it is one of the structural or
// environment-carrying instructions listed here.
bool HValue::CanReplaceWithDummyUses() {
  return FLAG_unreachable_code_elimination &&
      !(block()->IsReachable() ||
        IsBlockEntry() ||
        IsControlInstruction() ||
        IsArgumentsObject() ||
        IsCapturedObject() ||
        IsSimulate() ||
        IsEnterInlined() ||
        IsLeaveInlined());
}
459
460
461bool HValue::IsInteger32Constant() {
462 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
463}
464
465
// Precondition: IsInteger32Constant() holds.
int32_t HValue::GetInteger32Constant() {
  return HConstant::cast(this)->Integer32Value();
}
469
470
471bool HValue::EqualsInteger32Constant(int32_t value) {
472 return IsInteger32Constant() && GetInteger32Constant() == value;
473}
474
475
// Sets operand |index| and keeps the def-use chains consistent.
void HValue::SetOperandAt(int index, HValue* value) {
  RegisterUse(index, value);
  InternalSetOperandAt(index, value);
}
480
481
// Redirects all uses to |other| (if given), marks this value dead, and
// removes it from the graph.
void HValue::DeleteAndReplaceWith(HValue* other) {
  // We replace all uses first, so Delete can assert that there are none.
  if (other != NULL) ReplaceAllUsesWith(other);
  Kill();
  DeleteFromGraph();
}
488
489
// Moves every use-list node from this value onto |other|, rewriting each
// user's operand slot to point at |other|.  Nodes are spliced, not copied.
void HValue::ReplaceAllUsesWith(HValue* other) {
  while (use_list_ != NULL) {
    HUseListNode* list_node = use_list_;
    HValue* value = list_node->value();
    DCHECK(!value->block()->IsStartBlock());
    value->InternalSetOperandAt(list_node->index(), other);
    use_list_ = list_node->tail();
    // Push the node onto the front of |other|'s use list.
    list_node->set_tail(other->use_list_);
    other->use_list_ = list_node;
  }
}
501
502
// Marks this value dead.  Dead use-list entries are not eagerly removed:
// only the head of each operand's list is checked here; the rest are
// pruned lazily by HUseListNode::tail() on the next traversal.
void HValue::Kill() {
  // Instead of going through the entire use list of each operand, we only
  // check the first item in each use list and rely on the tail() method to
  // skip dead items, removing them lazily next time we traverse the list.
  SetFlag(kIsDead);
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* operand = OperandAt(i);
    if (operand == NULL) continue;
    HUseListNode* first = operand->use_list_;
    if (first != NULL && first->value()->CheckFlag(kIsDead)) {
      operand->use_list_ = first->tail();
    }
  }
}
517
518
// Attaches this value to |block| (or detaches it when NULL); assigns the
// value id on first attachment.
void HValue::SetBlock(HBasicBlock* block) {
  DCHECK(block_ == NULL || block == NULL);
  block_ = block;
  if (id_ == kNoNumber && block != NULL) {
    id_ = block->graph()->GetNextValueID(this);
  }
}
526
527
// Stream output delegates to the virtual printer.
std::ostream& operator<<(std::ostream& os, const HValue& v) {
  return v.PrintTo(os);
}
531
532
// Prints " type:<t>" for a value; output is suppressed for tagged values
// whose inferred type is more specific than the generic Tagged type.
// NOTE(review): the suppression condition reads inverted at first glance —
// confirm against HValue::PrintTo callers before changing.
std::ostream& operator<<(std::ostream& os, const TypeOf& t) {
  if (t.value->representation().IsTagged() &&
      !t.value->type().Equals(HType::Tagged()))
    return os;
  return os << " type:" << t.value->type();
}
539
540
// Prints the GVN "changes" flag set of a value, e.g. " changes[Maps,...]",
// or " changes[*]" when every side-effect flag is set.  Nothing is printed
// for an empty set.
std::ostream& operator<<(std::ostream& os, const ChangesOf& c) {
  GVNFlagSet changes_flags = c.value->ChangesFlags();
  if (changes_flags.IsEmpty()) return os;
  os << " changes[";
  if (changes_flags == c.value->AllSideEffectsFlagSet()) {
    os << "*";
  } else {
    bool add_comma = false;
#define PRINT_DO(Type)                      \
  if (changes_flags.Contains(k##Type)) {    \
    if (add_comma) os << ",";               \
    add_comma = true;                       \
    os << #Type;                            \
  }
    GVN_TRACKED_FLAG_LIST(PRINT_DO);
    GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
  }
  return os << "]";
}
561
562
// True when a single JSObject map is known for this value.
bool HValue::HasMonomorphicJSObjectType() {
  return !GetMonomorphicJSObjectMap().is_null();
}
566
567
568bool HValue::UpdateInferredType() {
569 HType type = CalculateInferredType();
570 bool result = (!type.Equals(type_));
571 type_ = type;
572 return result;
573}
574
575
// Maintains def-use chains when operand |index| changes from its current
// value to |new_value|: removes the old use record and re-links (or
// allocates) a record on the new value's use list.
void HValue::RegisterUse(int index, HValue* new_value) {
  HValue* old_value = OperandAt(index);
  if (old_value == new_value) return;

  HUseListNode* removed = NULL;
  if (old_value != NULL) {
    removed = old_value->RemoveUse(this, index);
  }

  if (new_value != NULL) {
    if (removed == NULL) {
      // No node to recycle; allocate a fresh one in the block's zone.
      new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
          this, index, new_value->use_list_);
    } else {
      // Recycle the node removed from the old value's list.
      removed->set_tail(new_value->use_list_);
      new_value->use_list_ = removed;
    }
  }
}
595
596
// Pushes |r| onto this value's range stack (ranges are stacked so they can
// be popped again, see RemoveLastAddedRange).
void HValue::AddNewRange(Range* r, Zone* zone) {
  if (!HasRange()) ComputeInitialRange(zone);
  if (!HasRange()) range_ = new(zone) Range();
  DCHECK(HasRange());
  r->StackUpon(range_);
  range_ = r;
}
604
605
// Pops the most recently stacked range (inverse of AddNewRange).
void HValue::RemoveLastAddedRange() {
  DCHECK(HasRange());
  DCHECK(range_->next() != NULL);
  range_ = range_->next();
}
611
612
// Seeds the range stack from instruction-specific range inference.
void HValue::ComputeInitialRange(Zone* zone) {
  DCHECK(!HasRange());
  range_ = InferRange(zone);
  DCHECK(HasRange());
}
618
619
// Prints mnemonic, operands, changes flags, type, and status annotations.
std::ostream& HInstruction::PrintTo(std::ostream& os) const {  // NOLINT
  os << Mnemonic() << " ";
  PrintDataTo(os) << ChangesOf(this) << TypeOf(this);
  if (CheckFlag(HValue::kHasNoObservableSideEffects)) os << " [noOSE]";
  if (CheckFlag(HValue::kIsDead)) os << " [dead]";
  return os;
}
627
628
629std::ostream& HInstruction::PrintDataTo(std::ostream& os) const { // NOLINT
630 for (int i = 0; i < OperandCount(); ++i) {
631 if (i > 0) os << " ";
632 os << NameOf(OperandAt(i));
633 }
634 return os;
635}
636
637
// Removes this instruction from its block's doubly-linked instruction
// list, updating the block's last-instruction pointer when needed.
void HInstruction::Unlink() {
  DCHECK(IsLinked());
  DCHECK(!IsControlInstruction());  // Must never move control instructions.
  DCHECK(!IsBlockEntry());  // Doesn't make sense to delete these.
  DCHECK(previous_ != NULL);
  previous_->next_ = next_;
  if (next_ == NULL) {
    DCHECK(block()->last() == this);
    block()->set_last(previous_);
  } else {
    next_->previous_ = previous_;
  }
  clear_block();
}
652
653
// Links this (unlinked) instruction immediately before |next|, inheriting
// |next|'s source position when this instruction has none.
void HInstruction::InsertBefore(HInstruction* next) {
  DCHECK(!IsLinked());
  DCHECK(!next->IsBlockEntry());
  DCHECK(!IsControlInstruction());
  DCHECK(!next->block()->IsStartBlock());
  DCHECK(next->previous_ != NULL);
  HInstruction* prev = next->previous();
  prev->next_ = this;
  next->previous_ = this;
  next_ = next;
  previous_ = prev;
  SetBlock(next->block());
  if (!has_position() && next->has_position()) {
    set_position(next->position());
  }
}
670
671
// Links this (unlinked) instruction immediately after |previous|, with two
// adjustments: non-constants destined for a finished start block are
// redirected to the successor block, and insertion after a side-effecting
// instruction skips past its trailing simulate.
void HInstruction::InsertAfter(HInstruction* previous) {
  DCHECK(!IsLinked());
  DCHECK(!previous->IsControlInstruction());
  DCHECK(!IsControlInstruction() || previous->next_ == NULL);
  HBasicBlock* block = previous->block();
  // Never insert anything except constants into the start block after finishing
  // it.
  if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
    DCHECK(block->end()->SecondSuccessor() == NULL);
    InsertAfter(block->end()->FirstSuccessor()->first());
    return;
  }

  // If we're inserting after an instruction with side-effects that is
  // followed by a simulate instruction, we need to insert after the
  // simulate instruction instead.
  HInstruction* next = previous->next_;
  if (previous->HasObservableSideEffects() && next != NULL) {
    DCHECK(next->IsSimulate());
    previous = next;
    next = previous->next_;
  }

  previous_ = previous;
  next_ = next;
  SetBlock(block);
  previous->next_ = this;
  if (next != NULL) next->previous_ = this;
  if (block->last() == previous) {
    block->set_last(this);
  }
  // Inherit the source position from the insertion point if we have none.
  if (!has_position() && previous->has_position()) {
    set_position(previous->position());
  }
}
707
708
// True if this instruction dominates |other|: either its block dominates
// the other's block, or both are in the same block and this one comes
// first (determined by a linear scan forward from here).
bool HInstruction::Dominates(HInstruction* other) {
  if (block() != other->block()) {
    return block()->Dominates(other->block());
  }
  // Both instructions are in the same basic block. This instruction
  // should precede the other one in order to dominate it.
  for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
    if (instr == other) {
      return true;
    }
  }
  return false;
}
722
723
#ifdef DEBUG
// Debug-only consistency checks: operands defined before use, side effects
// followed by simulates, GVN-eligible instructions override DataEquals,
// and all uses are linked into the graph.
void HInstruction::Verify() {
  // Verify that input operands are defined before use.
  HBasicBlock* cur_block = block();
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* other_operand = OperandAt(i);
    if (other_operand == NULL) continue;
    HBasicBlock* other_block = other_operand->block();
    if (cur_block == other_block) {
      if (!other_operand->IsPhi()) {
        HInstruction* cur = this->previous();
        while (cur != NULL) {
          if (cur == other_operand) break;
          cur = cur->previous();
        }
        // Must reach other operand in the same block!
        DCHECK(cur == other_operand);
      }
    } else {
      // If the following assert fires, you may have forgotten an
      // AddInstruction.
      DCHECK(other_block->Dominates(cur_block));
    }
  }

  // Verify that instructions that may have side-effects are followed
  // by a simulate instruction.
  if (HasObservableSideEffects() && !IsOsrEntry()) {
    DCHECK(next()->IsSimulate());
  }

  // Verify that instructions that can be eliminated by GVN have overridden
  // HValue::DataEquals. The default implementation is UNREACHABLE. We
  // don't actually care whether DataEquals returns true or false here.
  if (CheckFlag(kUseGVN)) DataEquals(this);

  // Verify that all uses are in the graph.
  for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
    if (use.value()->IsInstruction()) {
      DCHECK(HInstruction::cast(use.value())->IsLinked());
    }
  }
}
#endif
768
769
// Exhaustive classification of all concrete opcodes: returns true for
// instructions that may trigger a deoptimization.  No default case, so the
// compiler flags any opcode missing from the switch.
bool HInstruction::CanDeoptimize() {
  switch (opcode()) {
    // Opcodes that can never deoptimize.
    case HValue::kAbnormalExit:
    case HValue::kAccessArgumentsAt:
    case HValue::kAllocate:
    case HValue::kArgumentsElements:
    case HValue::kArgumentsLength:
    case HValue::kArgumentsObject:
    case HValue::kBlockEntry:
    case HValue::kCallNewArray:
    case HValue::kCapturedObject:
    case HValue::kClassOfTestAndBranch:
    case HValue::kCompareGeneric:
    case HValue::kCompareHoleAndBranch:
    case HValue::kCompareMap:
    case HValue::kCompareNumericAndBranch:
    case HValue::kCompareObjectEqAndBranch:
    case HValue::kConstant:
    case HValue::kContext:
    case HValue::kDebugBreak:
    case HValue::kDeclareGlobals:
    case HValue::kDoubleBits:
    case HValue::kDummyUse:
    case HValue::kEnterInlined:
    case HValue::kEnvironmentMarker:
    case HValue::kForceRepresentation:
    case HValue::kGetCachedArrayIndex:
    case HValue::kGoto:
    case HValue::kHasCachedArrayIndexAndBranch:
    case HValue::kHasInstanceTypeAndBranch:
    case HValue::kInnerAllocatedObject:
    case HValue::kIsSmiAndBranch:
    case HValue::kIsStringAndBranch:
    case HValue::kIsUndetectableAndBranch:
    case HValue::kLeaveInlined:
    case HValue::kLoadFieldByIndex:
    case HValue::kLoadGlobalGeneric:
    case HValue::kLoadNamedField:
    case HValue::kLoadNamedGeneric:
    case HValue::kLoadRoot:
    case HValue::kMathMinMax:
    case HValue::kParameter:
    case HValue::kPhi:
    case HValue::kPushArguments:
    case HValue::kReturn:
    case HValue::kSeqStringGetChar:
    case HValue::kStoreCodeEntry:
    case HValue::kStoreKeyed:
    case HValue::kStoreNamedField:
    case HValue::kStoreNamedGeneric:
    case HValue::kStringCharCodeAt:
    case HValue::kStringCharFromCode:
    case HValue::kThisFunction:
    case HValue::kTypeofIsAndBranch:
    case HValue::kUnknownOSRValue:
    case HValue::kUseConst:
      return false;

    // Opcodes that may deoptimize.
    case HValue::kAdd:
    case HValue::kApplyArguments:
    case HValue::kBitwise:
    case HValue::kBoundsCheck:
    case HValue::kBranch:
    case HValue::kCallRuntime:
    case HValue::kCallWithDescriptor:
    case HValue::kChange:
    case HValue::kCheckArrayBufferNotNeutered:
    case HValue::kCheckHeapObject:
    case HValue::kCheckInstanceType:
    case HValue::kCheckMapValue:
    case HValue::kCheckMaps:
    case HValue::kCheckSmi:
    case HValue::kCheckValue:
    case HValue::kClampToUint8:
    case HValue::kDeoptimize:
    case HValue::kDiv:
    case HValue::kForInCacheArray:
    case HValue::kForInPrepareMap:
    case HValue::kHasInPrototypeChainAndBranch:
    case HValue::kInvokeFunction:
    case HValue::kLoadContextSlot:
    case HValue::kLoadFunctionPrototype:
    case HValue::kLoadKeyed:
    case HValue::kLoadKeyedGeneric:
    case HValue::kMathFloorOfDiv:
    case HValue::kMaybeGrowElements:
    case HValue::kMod:
    case HValue::kMul:
    case HValue::kOsrEntry:
    case HValue::kPower:
    case HValue::kPrologue:
    case HValue::kRor:
    case HValue::kSar:
    case HValue::kSeqStringSetChar:
    case HValue::kShl:
    case HValue::kShr:
    case HValue::kSimulate:
    case HValue::kStackCheck:
    case HValue::kStoreContextSlot:
    case HValue::kStoreKeyedGeneric:
    case HValue::kStringAdd:
    case HValue::kStringCompareAndBranch:
    case HValue::kSub:
    case HValue::kTransitionElementsKind:
    case HValue::kTrapAllocationMemento:
    case HValue::kTypeof:
    case HValue::kUnaryMathOperation:
    case HValue::kWrapReceiver:
      return true;
  }
  UNREACHABLE();
  return true;
}
883
884
// Prints a value's short name: representation mnemonic followed by its id.
std::ostream& operator<<(std::ostream& os, const NameOf& v) {
  return os << v.value->representation().Mnemonic() << v.value->id();
}
888
// Prints only the wrapped value's name.
std::ostream& HDummyUse::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}
892
893
894std::ostream& HEnvironmentMarker::PrintDataTo(
895 std::ostream& os) const { // NOLINT
896 return os << (kind() == BIND ? "bind" : "lookup") << " var[" << index()
897 << "]";
898}
899
900
// Prints the callee value and the argument count.
std::ostream& HUnaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " #" << argument_count();
}
904
905
// Prints both call operands and the argument count.
std::ostream& HBinaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(first()) << " " << NameOf(second()) << " #"
            << argument_count();
}
910
// Prefixes the standard printout with "Tail" for tail-call invocations.
std::ostream& HInvokeFunction::PrintTo(std::ostream& os) const {  // NOLINT
  if (tail_call_mode() == TailCallMode::kAllow) os << "Tail";
  return HBinaryCall::PrintTo(os);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000915
// Appends a JSTailCall annotation when the source used a tail call.
std::ostream& HInvokeFunction::PrintDataTo(std::ostream& os) const {  // NOLINT
  HBinaryCall::PrintDataTo(os);
  if (syntactic_tail_call_mode() == TailCallMode::kAllow) {
    os << ", JSTailCall";
  }
  return os;
}
923
// Prints index and length, the decomposed base/offset/scale form when one
// is attached, and a [DISABLED] marker for elided checks.
std::ostream& HBoundsCheck::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(index()) << " " << NameOf(length());
  if (base() != NULL && (offset() != 0 || scale() != 0)) {
    os << " base: ((";
    if (base() != index()) {
      os << NameOf(index());
    } else {
      os << "index";
    }
    os << " + " << offset() << ") >> " << scale() << ")";
  }
  if (skip_check()) os << " [DISABLED]";
  return os;
}
938
939
// Chooses a representation from the actual index and length values,
// treating tagged-but-Smi-typed inputs as Smi and capping the result at
// Integer32 (a bounds check never needs anything wider).
void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  HValue* actual_index = index()->ActualValue();
  HValue* actual_length = length()->ActualValue();
  Representation index_rep = actual_index->representation();
  Representation length_rep = actual_length->representation();
  if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
    index_rep = Representation::Smi();
  }
  if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
    length_rep = Representation::Smi();
  }
  Representation r = index_rep.generalize(length_rep);
  if (r.is_more_general_than(Representation::Integer32())) {
    r = Representation::Integer32();
  }
  UpdateRepresentation(r, h_infer, "boundscheck");
}
958
959
// A passed bounds check constrains the index to [0, length-1] (or
// [0, length] when equality is allowed), intersected with the index's own
// range; falls back to the generic inference otherwise.
Range* HBoundsCheck::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32() && length()->HasRange()) {
    int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
    int lower = 0;

    Range* result = new(zone) Range(lower, upper);
    if (index()->HasRange()) {
      result->Intersect(index()->range());
    }

    // In case of Smi representation, clamp result to Smi::kMaxValue.
    if (r.IsSmi()) result->ClampToSmi();
    return result;
  }
  return HValue::InferRange(zone);
}
977
978
// Prints all operands, the argument count, and a tail-call annotation.
std::ostream& HCallWithDescriptor::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  for (int i = 0; i < OperandCount(); i++) {
    os << NameOf(OperandAt(i)) << " ";
  }
  os << "#" << argument_count();
  if (syntactic_tail_call_mode() == TailCallMode::kAllow) {
    os << ", JSTailCall";
  }
  return os;
}
990
991
992std::ostream& HCallNewArray::PrintDataTo(std::ostream& os) const { // NOLINT
993 os << ElementsKindToString(elements_kind()) << " ";
994 return HBinaryCall::PrintDataTo(os);
995}
996
997
// Prints the runtime function name, FP-save mode, and argument count.
std::ostream& HCallRuntime::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << function()->name << " ";
  if (save_doubles() == kSaveFPRegs) os << "[save doubles] ";
  return os << "#" << argument_count();
}
1003
1004
// Prints the tested value and the class name being matched.
std::ostream& HClassOfTestAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << "class_of_test(" << NameOf(value()) << ", \""
            << class_name()->ToCString().get() << "\")";
}
1010
1011
1012std::ostream& HWrapReceiver::PrintDataTo(std::ostream& os) const { // NOLINT
1013 return os << NameOf(receiver()) << " " << NameOf(function());
1014}
1015
1016
1017std::ostream& HAccessArgumentsAt::PrintDataTo(
1018 std::ostream& os) const { // NOLINT
1019 return os << NameOf(arguments()) << "[" << NameOf(index()) << "], length "
1020 << NameOf(length());
1021}
1022
1023
// Prints the comma-separated list of successor blocks.
std::ostream& HControlInstruction::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << " goto (";
  bool first_block = true;
  for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
    if (!first_block) os << ", ";
    os << *it.Current();
    first_block = false;
  }
  return os << ")";
}
1035
1036
1037std::ostream& HUnaryControlInstruction::PrintDataTo(
1038 std::ostream& os) const { // NOLINT
1039 os << NameOf(value());
1040 return HControlInstruction::PrintDataTo(os);
1041}
1042
1043
// Prints the returned value and how many values are popped.
std::ostream& HReturn::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " (pop " << NameOf(parameter_count())
            << " values)";
}
1048
1049
// Derives an input representation from ToBoolean type feedback: any
// heap-object type forces Tagged; undefined+number can stay Double (as
// undefined converts via NaN); pure numbers map to Double/Smi; no feedback
// yields None.
Representation HBranch::observed_input_representation(int index) {
  if (expected_input_types_.Contains(ToBooleanICStub::NULL_TYPE) ||
      expected_input_types_.Contains(ToBooleanICStub::SPEC_OBJECT) ||
      expected_input_types_.Contains(ToBooleanICStub::STRING) ||
      expected_input_types_.Contains(ToBooleanICStub::SYMBOL) ||
      expected_input_types_.Contains(ToBooleanICStub::SIMD_VALUE)) {
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanICStub::UNDEFINED)) {
    if (expected_input_types_.Contains(ToBooleanICStub::HEAP_NUMBER)) {
      return Representation::Double();
    }
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanICStub::HEAP_NUMBER)) {
    return Representation::Double();
  }
  if (expected_input_types_.Contains(ToBooleanICStub::SMI)) {
    return Representation::Smi();
  }
  return Representation::None();
}
1072
1073
1074bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1075 HValue* value = this->value();
1076 if (value->EmitAtUses()) {
1077 DCHECK(value->IsConstant());
1078 DCHECK(!value->representation().IsDouble());
1079 *block = HConstant::cast(value)->BooleanValue()
1080 ? FirstSuccessor()
1081 : SecondSuccessor();
1082 return true;
1083 }
1084 *block = NULL;
1085 return false;
1086}
1087
1088
1089std::ostream& HBranch::PrintDataTo(std::ostream& os) const { // NOLINT
1090 return HUnaryControlInstruction::PrintDataTo(os) << " "
1091 << expected_input_types();
1092}
1093
1094
1095std::ostream& HCompareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1096 os << NameOf(value()) << " (" << *map().handle() << ")";
1097 HControlInstruction::PrintDataTo(os);
1098 if (known_successor_index() == 0) {
1099 os << " [true]";
1100 } else if (known_successor_index() == 1) {
1101 os << " [false]";
1102 }
1103 return os;
1104}
1105
1106
// Returns the printable name of this unary math operation (used by
// PrintDataTo and tracing output).
const char* HUnaryMathOperation::OpName() const {
  switch (op()) {
    case kMathFloor:
      return "floor";
    case kMathFround:
      return "fround";
    case kMathRound:
      return "round";
    case kMathAbs:
      return "abs";
    case kMathCos:
      return "cos";
    case kMathLog:
      return "log";
    case kMathExp:
      return "exp";
    case kMathSin:
      return "sin";
    case kMathSqrt:
      return "sqrt";
    case kMathPowHalf:
      return "pow-half";
    case kMathClz32:
      return "clz32";
    default:
      UNREACHABLE();
      return NULL;
  }
}
1136
1137
// Computes a value range for the math operation where one can be stated:
// clz32 is always in [0, 32], and abs of a ranged int input can be bounded.
Range* HUnaryMathOperation::InferRange(Zone* zone) {
  Representation r = representation();
  if (op() == kMathClz32) return new(zone) Range(0, 32);
  if (r.IsSmiOrInteger32() && value()->HasRange()) {
    if (op() == kMathAbs) {
      int upper = value()->range()->upper();
      int lower = value()->range()->lower();
      bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, on which the
      // instruction deopts. Hence clamp it to kMaxInt.
      int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
      int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
      Range* result =
          new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
                          Max(abs_lower, abs_upper));
      // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
      // Smi::kMaxValue.
      if (r.IsSmi()) result->ClampToSmi();
      return result;
    }
  }
  // All other operations fall back to the generic estimate.
  return HValue::InferRange(zone);
}
1161
1162
1163std::ostream& HUnaryMathOperation::PrintDataTo(
1164 std::ostream& os) const { // NOLINT
1165 return os << OpName() << " " << NameOf(value());
1166}
1167
1168
1169std::ostream& HUnaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
1170 return os << NameOf(value());
1171}
1172
1173
// Prints the tested value and, for recognized [from_, to_] instance-type
// intervals, a short symbolic name of the checked type.
std::ostream& HHasInstanceTypeAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value());
  switch (from_) {
    case FIRST_JS_RECEIVER_TYPE:
      if (to_ == LAST_TYPE) os << " spec_object";
      break;
    case JS_REGEXP_TYPE:
      if (to_ == JS_REGEXP_TYPE) os << " reg_exp";
      break;
    case JS_ARRAY_TYPE:
      if (to_ == JS_ARRAY_TYPE) os << " array";
      break;
    case JS_FUNCTION_TYPE:
      if (to_ == JS_FUNCTION_TYPE) os << " function";
      break;
    default:
      // Unrecognized intervals print no symbolic suffix.
      break;
  }
  return os;
}
1195
1196
1197std::ostream& HTypeofIsAndBranch::PrintDataTo(
1198 std::ostream& os) const { // NOLINT
1199 os << NameOf(value()) << " == " << type_literal()->ToCString().get();
1200 return HControlInstruction::PrintDataTo(os);
1201}
1202
1203
namespace {

// Computes the string the JavaScript `typeof` operator would yield for the
// given constant, mirroring the runtime's typeof semantics.
String* TypeOfString(HConstant* constant, Isolate* isolate) {
  Heap* heap = isolate->heap();
  if (constant->HasNumberValue()) return heap->number_string();
  if (constant->HasStringValue()) return heap->string_string();
  switch (constant->GetInstanceType()) {
    case ODDBALL_TYPE: {
      Unique<Object> unique = constant->GetUnique();
      if (unique.IsKnownGlobal(heap->true_value()) ||
          unique.IsKnownGlobal(heap->false_value())) {
        return heap->boolean_string();
      }
      // Per spec, typeof null is "object".
      if (unique.IsKnownGlobal(heap->null_value())) {
        return heap->object_string();
      }
      DCHECK(unique.IsKnownGlobal(heap->undefined_value()));
      return heap->undefined_string();
    }
    case SYMBOL_TYPE:
      return heap->symbol_string();
    case SIMD128_VALUE_TYPE: {
      // Dispatch on the constant's map to find the concrete SIMD type name.
      Unique<Map> map = constant->ObjectMap();
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
  if (map.IsKnownGlobal(heap->type##_map())) {                \
    return heap->type##_string();                             \
  }
      SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
      UNREACHABLE();
      return nullptr;
    }
    default:
      // Undetectable objects (e.g. document.all) report "undefined".
      if (constant->IsUndetectable()) return heap->undefined_string();
      if (constant->IsCallable()) return heap->function_string();
      return heap->object_string();
  }
}

}  // namespace
1244
1245
1246bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1247 if (FLAG_fold_constants && value()->IsConstant()) {
1248 HConstant* constant = HConstant::cast(value());
1249 String* type_string = TypeOfString(constant, isolate());
1250 bool same_type = type_literal_.IsKnownGlobal(type_string);
1251 *block = same_type ? FirstSuccessor() : SecondSuccessor();
1252 return true;
1253 } else if (value()->representation().IsSpecialization()) {
1254 bool number_type =
1255 type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
1256 *block = number_type ? FirstSuccessor() : SecondSuccessor();
1257 return true;
1258 }
1259 *block = NULL;
1260 return false;
1261}
1262
1263
1264std::ostream& HCheckMapValue::PrintDataTo(std::ostream& os) const { // NOLINT
1265 return os << NameOf(value()) << " " << NameOf(map());
1266}
1267
1268
// When the map operand is a compile-time constant, replace this dynamic
// map check with a static HCheckMaps against that map.
HValue* HCheckMapValue::Canonicalize() {
  if (map()->IsConstant()) {
    HConstant* c_map = HConstant::cast(map());
    return HCheckMaps::CreateAndInsertAfter(
        block()->graph()->zone(), value(), c_map->MapValue(),
        c_map->HasStableMapValue(), this);
  }
  return this;
}
1278
1279
1280std::ostream& HForInPrepareMap::PrintDataTo(std::ostream& os) const { // NOLINT
1281 return os << NameOf(enumerable());
1282}
1283
1284
1285std::ostream& HForInCacheArray::PrintDataTo(std::ostream& os) const { // NOLINT
1286 return os << NameOf(enumerable()) << " " << NameOf(map()) << "[" << idx_
1287 << "]";
1288}
1289
1290
1291std::ostream& HLoadFieldByIndex::PrintDataTo(
1292 std::ostream& os) const { // NOLINT
1293 return os << NameOf(object()) << " " << NameOf(index());
1294}
1295
1296
1297static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1298 if (!l->EqualsInteger32Constant(~0)) return false;
1299 *negated = r;
1300 return true;
1301}
1302
1303
1304static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1305 if (!instr->IsBitwise()) return false;
1306 HBitwise* b = HBitwise::cast(instr);
1307 return (b->op() == Token::BIT_XOR) &&
1308 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1309 MatchLeftIsOnes(b->right(), b->left(), negated));
1310}
1311
1312
1313static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1314 HValue* negated;
1315 return MatchNegationViaXor(instr, &negated) &&
1316 MatchNegationViaXor(negated, arg);
1317}
1318
1319
// Simplifies bitwise operations with identity constants and double
// negation. Operands flagged kUint32 are never forwarded, since their
// interpretation differs from the instruction's int32 result.
HValue* HBitwise::Canonicalize() {
  if (!representation().IsSmiOrInteger32()) return this;
  // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
  int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
  if (left()->EqualsInteger32Constant(nop_constant) &&
      !right()->CheckFlag(kUint32)) {
    return right();
  }
  if (right()->EqualsInteger32Constant(nop_constant) &&
      !left()->CheckFlag(kUint32)) {
    return left();
  }
  // Optimize double negation, a common pattern used for ToInt32(x).
  HValue* arg;
  if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
    return arg;
  }
  return this;
}
1339
1340
// static
// Factory for the special external-pointer + tagged addition variant.
HInstruction* HAdd::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right,
                        ExternalAddType external_add_type) {
  // For everything else, you should use the other factory method without
  // ExternalAddType.
  DCHECK_EQ(external_add_type, AddOfExternalAndTagged);
  return new (zone) HAdd(context, left, right, external_add_type);
}
1350
1351
1352Representation HAdd::RepresentationFromInputs() {
1353 Representation left_rep = left()->representation();
1354 if (left_rep.IsExternal()) {
1355 return Representation::External();
1356 }
1357 return HArithmeticBinaryOperation::RepresentationFromInputs();
1358}
1359
1360
// Overrides the required representation of the right operand (index 2)
// when the left operand is an external pointer.
Representation HAdd::RequiredInputRepresentation(int index) {
  if (index == 2) {
    Representation left_rep = left()->representation();
    if (left_rep.IsExternal()) {
      if (external_add_type_ == AddOfExternalAndTagged) {
        return Representation::Tagged();
      } else {
        // External + int32 offset.
        return Representation::Integer32();
      }
    }
  }
  return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
}
1374
1375
1376static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1377 return arg1->representation().IsSpecialization() &&
1378 arg2->EqualsInteger32Constant(identity);
1379}
1380
1381
HValue* HAdd::Canonicalize() {
  // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
  // (so the simplification is unsafe when the kept operand is a double
  // that could be -0).
  if (IsIdentityOperation(left(), right(), 0) &&
      !left()->representation().IsDouble()) {  // Left could be -0.
    return left();
  }
  if (IsIdentityOperation(right(), left(), 0) &&
      !left()->representation().IsDouble()) {  // Right could be -0.
    return right();
  }
  return this;
}
1394
1395
1396HValue* HSub::Canonicalize() {
1397 if (IsIdentityOperation(left(), right(), 0)) return left();
1398 return this;
1399}
1400
1401
1402HValue* HMul::Canonicalize() {
1403 if (IsIdentityOperation(left(), right(), 1)) return left();
1404 if (IsIdentityOperation(right(), left(), 1)) return right();
1405 return this;
1406}
1407
1408
1409bool HMul::MulMinusOne() {
1410 if (left()->EqualsInteger32Constant(-1) ||
1411 right()->EqualsInteger32Constant(-1)) {
1412 return true;
1413 }
1414
1415 return false;
1416}
1417
1418
1419HValue* HMod::Canonicalize() {
1420 return this;
1421}
1422
1423
1424HValue* HDiv::Canonicalize() {
1425 if (IsIdentityOperation(left(), right(), 1)) return left();
1426 return this;
1427}
1428
1429
1430HValue* HChange::Canonicalize() {
1431 return (from().Equals(to())) ? value() : this;
1432}
1433
1434
1435HValue* HWrapReceiver::Canonicalize() {
1436 if (HasNoUses()) return NULL;
1437 if (receiver()->type().IsJSReceiver()) {
1438 return receiver();
1439 }
1440 return this;
1441}
1442
1443
1444std::ostream& HTypeof::PrintDataTo(std::ostream& os) const { // NOLINT
1445 return os << NameOf(value());
1446}
1447
1448
1449HInstruction* HForceRepresentation::New(Isolate* isolate, Zone* zone,
1450 HValue* context, HValue* value,
1451 Representation representation) {
1452 if (FLAG_fold_constants && value->IsConstant()) {
1453 HConstant* c = HConstant::cast(value);
1454 c = c->CopyToRepresentation(representation, zone);
1455 if (c != NULL) return c;
1456 }
1457 return new(zone) HForceRepresentation(value, representation);
1458}
1459
1460
1461std::ostream& HForceRepresentation::PrintDataTo(
1462 std::ostream& os) const { // NOLINT
1463 return os << representation().Mnemonic() << " " << NameOf(value());
1464}
1465
1466
// Prints the operand, the source/target representations, and any
// truncation / deopt-related flags set on this change.
std::ostream& HChange::PrintDataTo(std::ostream& os) const {  // NOLINT
  HUnaryOperation::PrintDataTo(os);
  os << " " << from().Mnemonic() << " to " << to().Mnemonic();

  if (CanTruncateToSmi()) os << " truncating-smi";
  if (CanTruncateToInt32()) os << " truncating-int32";
  if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
  if (CheckFlag(kAllowUndefinedAsNaN)) os << " allow-undefined-as-nan";
  return os;
}
1477
1478
// Simplifies round/floor applied to values that are already integral, and
// rewrites floor(a / b) into the dedicated HMathFloorOfDiv instruction
// when both division operands can be made int32.
HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    // Look through an intervening representation change.
    if (val->IsChange()) val = HChange::cast(val)->value();
    if (val->representation().IsSmiOrInteger32()) {
      // Rounding an integer is a no-op; insert a change only if the
      // representations differ.
      if (val->representation().Equals(representation())) return val;
      return Prepend(new(block()->zone()) HChange(
          val, representation(), false, false));
    }
  }
  if (op() == kMathFloor && representation().IsSmiOrInteger32() &&
      value()->IsDiv() && value()->HasOneUse()) {
    HDiv* hdiv = HDiv::cast(value());

    // Try to obtain an int32 form of the numerator.
    HValue* left = hdiv->left();
    if (left->representation().IsInteger32() && !left->CheckFlag(kUint32)) {
      // A value with an integer representation does not need to be transformed.
    } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32() &&
               !HChange::cast(left)->value()->CheckFlag(kUint32)) {
      // A change from an integer32 can be replaced by the integer32 value.
      left = HChange::cast(left)->value();
    } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
      left = Prepend(new(block()->zone()) HChange(
          left, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    // Try to obtain an int32 form of the denominator.
    HValue* right = hdiv->right();
    if (right->IsInteger32Constant()) {
      right = Prepend(HConstant::cast(right)->CopyToRepresentation(
          Representation::Integer32(), right->block()->zone()));
    } else if (right->representation().IsInteger32() &&
               !right->CheckFlag(kUint32)) {
      // A value with an integer representation does not need to be transformed.
    } else if (right->IsChange() &&
               HChange::cast(right)->from().IsInteger32() &&
               !HChange::cast(right)->value()->CheckFlag(kUint32)) {
      // A change from an integer32 can be replaced by the integer32 value.
      right = HChange::cast(right)->value();
    } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
      right = Prepend(new(block()->zone()) HChange(
          right, Representation::Integer32(), false, false));
    } else {
      return this;
    }

    return Prepend(HMathFloorOfDiv::New(
        block()->graph()->isolate(), block()->zone(), context(), left, right));
  }
  return this;
}
1531
1532
// Removes the instance-type check when the value's static type (or
// constant value) already guarantees the checked property.
HValue* HCheckInstanceType::Canonicalize() {
  if ((check_ == IS_JS_RECEIVER && value()->type().IsJSReceiver()) ||
      (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
      (check_ == IS_STRING && value()->type().IsString())) {
    return value();
  }

  // An internalized-string constant trivially satisfies the check.
  if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
    if (HConstant::cast(value())->HasInternalizedStringValue()) {
      return value();
    }
  }
  return this;
}
1547
1548
// Returns the inclusive [first, last] instance-type interval for
// interval-based checks; only valid when is_interval_check() holds.
void HCheckInstanceType::GetCheckInterval(InstanceType* first,
                                          InstanceType* last) {
  DCHECK(is_interval_check());
  switch (check_) {
    case IS_JS_RECEIVER:
      *first = FIRST_JS_RECEIVER_TYPE;
      *last = LAST_JS_RECEIVER_TYPE;
      return;
    case IS_JS_ARRAY:
      // Single-type intervals: first == last.
      *first = *last = JS_ARRAY_TYPE;
      return;
    case IS_JS_FUNCTION:
      *first = *last = JS_FUNCTION_TYPE;
      return;
    case IS_JS_DATE:
      *first = *last = JS_DATE_TYPE;
      return;
    default:
      UNREACHABLE();
  }
}
1570
1571
// Returns the bit mask and expected tag for mask-based checks, i.e. the
// check passes when (instance_type & mask) == tag; only valid when the
// check is not interval-based.
void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
  DCHECK(!is_interval_check());
  switch (check_) {
    case IS_STRING:
      *mask = kIsNotStringMask;
      *tag = kStringTag;
      return;
    case IS_INTERNALIZED_STRING:
      *mask = kIsNotStringMask | kIsNotInternalizedMask;
      *tag = kInternalizedTag;
      return;
    default:
      UNREACHABLE();
  }
}
1587
1588
1589std::ostream& HCheckMaps::PrintDataTo(std::ostream& os) const { // NOLINT
1590 os << NameOf(value()) << " [" << *maps()->at(0).handle();
1591 for (int i = 1; i < maps()->size(); ++i) {
1592 os << "," << *maps()->at(i).handle();
1593 }
1594 os << "]";
1595 if (IsStabilityCheck()) os << "(stability-check)";
1596 return os;
1597}
1598
1599
// If the checked value is a constant whose (stable) map is in the allowed
// set, narrow the map set to that single map and downgrade the check to a
// stability check.
HValue* HCheckMaps::Canonicalize() {
  if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
    HConstant* c_value = HConstant::cast(value());
    if (c_value->HasObjectMap()) {
      for (int i = 0; i < maps()->size(); ++i) {
        if (c_value->ObjectMap() == maps()->at(i)) {
          if (maps()->size() > 1) {
            // Keep only the matching map.
            set_maps(new(block()->graph()->zone()) UniqueSet<Map>(
                    maps()->at(i), block()->graph()->zone()));
          }
          MarkAsStabilityCheck();
          break;
        }
      }
    }
  }
  return this;
}
1618
1619
1620std::ostream& HCheckValue::PrintDataTo(std::ostream& os) const { // NOLINT
1621 return os << NameOf(value()) << " " << Brief(*object().handle());
1622}
1623
1624
1625HValue* HCheckValue::Canonicalize() {
1626 return (value()->IsConstant() &&
1627 HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
1628}
1629
1630
// Returns a printable name for the kind of instance-type check.
const char* HCheckInstanceType::GetCheckName() const {
  switch (check_) {
    case IS_JS_RECEIVER: return "object";
    case IS_JS_ARRAY: return "array";
    case IS_JS_FUNCTION:
      return "function";
    case IS_JS_DATE:
      return "date";
    case IS_STRING: return "string";
    case IS_INTERNALIZED_STRING: return "internalized_string";
  }
  UNREACHABLE();
  return "";
}
1645
1646
1647std::ostream& HCheckInstanceType::PrintDataTo(
1648 std::ostream& os) const { // NOLINT
1649 os << GetCheckName() << " ";
1650 return HUnaryOperation::PrintDataTo(os);
1651}
1652
1653
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001654std::ostream& HUnknownOSRValue::PrintDataTo(std::ostream& os) const { // NOLINT
1655 const char* type = "expression";
1656 if (environment_->is_local_index(index_)) type = "local";
1657 if (environment_->is_special_index(index_)) type = "special";
1658 if (environment_->is_parameter_index(index_)) type = "parameter";
1659 return os << type << " @ " << index_;
1660}
1661
1662
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001663Range* HValue::InferRange(Zone* zone) {
1664 Range* result;
1665 if (representation().IsSmi() || type().IsSmi()) {
1666 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
1667 result->set_can_be_minus_zero(false);
1668 } else {
1669 result = new(zone) Range();
1670 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
1671 // TODO(jkummerow): The range cannot be minus zero when the upper type
1672 // bound is Integer32.
1673 }
1674 return result;
1675}
1676
1677
// Range inference for representation changes. Also refines the
// instruction: a proven-in-Smi-range int32 input lets us mark the result
// type Smi and drop the overflow/new-space side effects.
Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    set_type(HType::Smi());
    ClearChangesFlag(kNewSpacePromotion);
  }
  if (to().IsSmiOrTagged() &&
      input_range != NULL &&
      input_range->IsInSmiRange() &&
      (!SmiValuesAre32Bits() ||
       !value()->CheckFlag(HValue::kUint32) ||
       input_range->upper() != kMaxInt)) {
    // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
    // interval, so we treat kMaxInt as a sentinel for this entire interval.
    ClearFlag(kCanOverflow);
  }
  // The result range is a copy of the input range (or the generic
  // estimate), with -0 removed for truncating int/Smi targets.
  Range* result = (input_range != NULL)
      ? input_range->Copy(zone)
      : HValue::InferRange(zone);
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}
1707
1708
1709Range* HConstant::InferRange(Zone* zone) {
1710 if (HasInteger32Value()) {
1711 Range* result = new(zone) Range(int32_value_, int32_value_);
1712 result->set_can_be_minus_zero(false);
1713 return result;
1714 }
1715 return HValue::InferRange(zone);
1716}
1717
1718
// A phi has no source position of its own; use its block's first
// instruction's position.
SourcePosition HPhi::position() const { return block()->first()->position(); }
1720
1721
// Range of a phi: loop-header phis get the full range of their
// representation (their operands may not be processed yet); other phis get
// the union of all operand ranges.
Range* HPhi::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    if (block()->IsLoopHeader()) {
      Range* range = r.IsSmi()
          ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
          : new(zone) Range(kMinInt, kMaxInt);
      return range;
    } else {
      Range* range = OperandAt(0)->range()->Copy(zone);
      for (int i = 1; i < OperandCount(); ++i) {
        range->Union(OperandAt(i)->range());
      }
      return range;
    }
  } else {
    return HValue::InferRange(zone);
  }
}
1741
1742
// Range of an int/Smi addition. Also clears kCanOverflow when the range
// arithmetic proves overflow impossible or all uses truncate anyway.
Range* HAdd::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->AddAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    // -0 only arises from (-0) + (-0).
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeMinusZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
1762
1763
// Range of an int/Smi subtraction. Also clears kCanOverflow when range
// arithmetic proves overflow impossible or all uses truncate anyway.
Range* HSub::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->SubAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    // -0 only arises from (-0) - 0.
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
1783
1784
// Range of an int/Smi multiplication. Also clears kCanOverflow when range
// arithmetic proves overflow impossible, or when all uses truncate —
// except for multiplications by -1, which must keep the overflow check.
Range* HMul::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->MulAndCheckOverflow(r, b) ||
        (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
         (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
         MulMinusOne())) {
      // Truncated int multiplication is too precise and therefore not the
      // same as converting to Double and back.
      // Handle truncated integer multiplication by -1 special.
      ClearFlag(kCanOverflow);
    }
    // -0 arises when a zero factor meets a negative one.
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               ((a->CanBeZero() && b->CanBeNegative()) ||
                                (a->CanBeNegative() && b->CanBeZero())));
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}
1809
1810
// Range of an int32 division. Primarily used to clear the overflow
// (kMinInt / -1) and division-by-zero deopt flags when the operand ranges
// rule those cases out.
Range* HDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    // -0 arises from (-0) / x or 0 / negative.
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
1831
1832
// Range of floor(a / b) on int32. Like HDiv::InferRange, but also refines
// the kLeftCanBeMinInt / kLeftCanBeNegative / kLeftCanBePositive flags
// used by the code generator.
Range* HMathFloorOfDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt)) {
      ClearFlag(kLeftCanBeMinInt);
    }

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    if (!a->CanBePositive()) {
      ClearFlag(HValue::kLeftCanBePositive);
    }

    // Overflow only at kMinInt / -1.
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
1865
1866
// Returns |a| - 1 without the undefined behavior that negating kMinInt
// would cause (|kMinInt| is unrepresentable, but |kMinInt| - 1 == kMaxInt
// is fine).
static int32_t AbsMinus1(int32_t a) {
  if (a < 0) return -(a + 1);
  return a - 1;
}
1870
1871
// Range of an int32 modulus. The magnitude is bounded by |b| - 1 and the
// sign follows the left operand; also clears deopt flags that the operand
// ranges rule out.
Range* HMod::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();

    // The magnitude of the modulus is bounded by the right operand.
    int32_t positive_bound = Max(AbsMinus1(b->lower()), AbsMinus1(b->upper()));

    // The result of the modulo operation has the sign of its left operand.
    bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
    Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
                                    a->CanBePositive() ? positive_bound : 0);

    // -0 arises from negative % x when the remainder is zero.
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  left_can_be_negative);

    if (!a->CanBeNegative()) {
      ClearFlag(HValue::kLeftCanBeNegative);
    }

    // Overflow only at kMinInt % -1.
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}
1904
1905
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001906Range* HMathMinMax::InferRange(Zone* zone) {
1907 if (representation().IsSmiOrInteger32()) {
1908 Range* a = left()->range();
1909 Range* b = right()->range();
1910 Range* res = a->Copy(zone);
1911 if (operation_ == kMathMax) {
1912 res->CombinedMax(b);
1913 } else {
1914 DCHECK(operation_ == kMathMin);
1915 res->CombinedMin(b);
1916 }
1917 return res;
1918 } else {
1919 return HValue::InferRange(zone);
1920 }
1921}
1922
1923
// Appends an argument operand: grow the operand list with a placeholder,
// then wire the value through SetOperandAt so use lists stay consistent.
void HPushArguments::AddInput(HValue* value) {
  inputs_.Add(NULL, value->block()->zone());
  SetOperandAt(OperandCount() - 1, value);
}
1928
1929
1930std::ostream& HPhi::PrintTo(std::ostream& os) const { // NOLINT
1931 os << "[";
1932 for (int i = 0; i < OperandCount(); ++i) {
1933 os << " " << NameOf(OperandAt(i)) << " ";
1934 }
1935 return os << " uses" << UseCount()
1936 << representation_from_indirect_uses().Mnemonic() << " "
1937 << TypeOf(this) << "]";
1938}
1939
1940
// Appends an operand to the phi (placeholder first, then SetOperandAt so
// use lists stay consistent).
void HPhi::AddInput(HValue* value) {
  inputs_.Add(NULL, value->block()->zone());
  SetOperandAt(OperandCount() - 1, value);
  // Mark phis that may have 'arguments' directly or indirectly as an operand.
  if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
    SetFlag(kIsArguments);
  }
}
1949
1950
1951bool HPhi::HasRealUses() {
1952 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
1953 if (!it.value()->IsPhi()) return true;
1954 }
1955 return false;
1956}
1957
1958
// A phi is redundant if every operand other than the phi itself is one and
// the same value; returns that value, or NULL if the phi is needed.
HValue* HPhi::GetRedundantReplacement() {
  HValue* candidate = NULL;
  int count = OperandCount();
  int position = 0;
  // Find the first operand that is not this phi itself.
  while (position < count && candidate == NULL) {
    HValue* current = OperandAt(position++);
    if (current != this) candidate = current;
  }
  // Every remaining operand must be the candidate or this phi.
  while (position < count) {
    HValue* current = OperandAt(position++);
    if (current != this && current != candidate) return NULL;
  }
  DCHECK(candidate != this);
  return candidate;
}
1974
1975
// Detaches this phi from its block; RemovePhi clears the block pointer,
// which the second DCHECK verifies.
void HPhi::DeleteFromGraph() {
  DCHECK(block() != NULL);
  block()->RemovePhi(this);
  DCHECK(block() == NULL);
}
1981
1982
// Scans this phi's non-phi uses to seed its observed-representation and
// truncation information for representation inference.
void HPhi::InitRealUses(int phi_id) {
  // Initialize real uses.
  phi_id_ = phi_id;
  // Compute a conservative approximation of truncating uses before inferring
  // representations. The proper, exact computation will be done later, when
  // inserting representation changes.
  SetFlag(kTruncatingToSmi);
  SetFlag(kTruncatingToInt32);
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* value = it.value();
    if (!value->IsPhi()) {
      Representation rep = value->observed_input_representation(it.index());
      representation_from_non_phi_uses_ =
          representation_from_non_phi_uses().generalize(rep);
      // Numeric feedback from any use counts as type feedback.
      if (rep.IsSmi() || rep.IsInteger32() || rep.IsDouble()) {
        has_type_feedback_from_uses_ = true;
      }

      if (FLAG_trace_representation) {
        PrintF("#%d Phi is used by real #%d %s as %s\n",
               id(), value->id(), value->Mnemonic(), rep.Mnemonic());
      }
      // Simulates don't constrain truncation; any other non-truncating use
      // clears the corresponding conservative flag.
      if (!value->IsSimulate()) {
        if (!value->CheckFlag(kTruncatingToSmi)) {
          ClearFlag(kTruncatingToSmi);
        }
        if (!value->CheckFlag(kTruncatingToInt32)) {
          ClearFlag(kTruncatingToInt32);
        }
      }
    }
  }
}
2016
2017
// Folds another phi's non-phi-use representation feedback into this phi's
// indirect-use representation (used when phis feed each other).
void HPhi::AddNonPhiUsesFrom(HPhi* other) {
  if (FLAG_trace_representation) {
    PrintF(
        "generalizing use representation '%s' of #%d Phi "
        "with uses of #%d Phi '%s'\n",
        representation_from_indirect_uses().Mnemonic(), id(), other->id(),
        other->representation_from_non_phi_uses().Mnemonic());
  }

  representation_from_indirect_uses_ =
      representation_from_indirect_uses().generalize(
          other->representation_from_non_phi_uses());
}
2031
2032
// Folds the effects of the given simulates into this one; each merged
// simulate is deleted from the graph afterwards.
void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
  while (!list->is_empty()) {
    HSimulate* from = list->RemoveLast();
    ZoneList<HValue*>* from_values = &from->values_;
    for (int i = 0; i < from_values->length(); ++i) {
      if (from->HasAssignedIndexAt(i)) {
        int index = from->GetAssignedIndexAt(i);
        // This simulate's own binding for the index wins over the merged
        // one.
        if (HasValueForIndex(index)) continue;
        AddAssignedValue(index, from_values->at(i));
      } else {
        if (pop_count_ > 0) {
          // A pending pop in this simulate cancels a pushed value from the
          // merged one.
          pop_count_--;
        } else {
          AddPushedValue(from_values->at(i));
        }
      }
    }
    // Remaining pops accumulate.
    pop_count_ += from->pop_count_;
    from->DeleteAndReplaceWith(NULL);
  }
}
2054
2055
// Prints the AST id, pop count, and recorded values (iterated in reverse
// order) of this simulate.
std::ostream& HSimulate::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << "id=" << ast_id().ToInt();
  if (pop_count_ > 0) os << " pop " << pop_count_;
  if (values_.length() > 0) {
    if (pop_count_ > 0) os << " /";
    for (int i = values_.length() - 1; i >= 0; --i) {
      if (HasAssignedIndexAt(i)) {
        os << " var[" << GetAssignedIndexAt(i) << "] = ";
      } else {
        os << " push ";
      }
      os << NameOf(values_[i]);
      if (i > 0) os << ",";
    }
  }
  return os;
}
2073
2074
// Applies this simulate's recorded effects (pops, slot bindings, pushes)
// to |env|. Guarded so the replay happens at most once per simulate.
void HSimulate::ReplayEnvironment(HEnvironment* env) {
  if (is_done_with_replay()) return;
  DCHECK(env != NULL);
  env->set_ast_id(ast_id());
  env->Drop(pop_count());
  // Iterate backwards so pushed values reach the environment in the order
  // they were originally pushed.
  for (int i = values()->length() - 1; i >= 0; --i) {
    HValue* value = values()->at(i);
    if (HasAssignedIndexAt(i)) {
      env->Bind(GetAssignedIndexAt(i), value);
    } else {
      env->Push(value);
    }
  }
  set_done_with_replay();
}
2090
2091
2092static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2093 HCapturedObject* other) {
2094 for (int i = 0; i < values->length(); ++i) {
2095 HValue* value = values->at(i);
2096 if (value->IsCapturedObject()) {
2097 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2098 values->at(i) = other;
2099 } else {
2100 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2101 }
2102 }
2103 }
2104}
2105
2106
2107// Replay captured objects by replacing all captured objects with the
2108// same capture id in the current and all outer environments.
2109void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2110 DCHECK(env != NULL);
2111 while (env != NULL) {
2112 ReplayEnvironmentNested(env->values(), this);
2113 env = env->outer();
2114 }
2115}
2116
2117
2118std::ostream& HCapturedObject::PrintDataTo(std::ostream& os) const { // NOLINT
2119 os << "#" << capture_id() << " ";
2120 return HDematerializedObject::PrintDataTo(os);
2121}
2122
2123
// Records |return_target| as a block that returns from this inlined
// function; the block must already be marked as an inline return target.
void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
                                         Zone* zone) {
  DCHECK(return_target->IsInlineReturnTarget());
  return_targets_.Add(return_target, zone);
}
2129
2130
2131std::ostream& HEnterInlined::PrintDataTo(std::ostream& os) const { // NOLINT
Ben Murdochda12d292016-06-02 14:46:10 +01002132 os << function()->debug_name()->ToCString().get();
2133 if (syntactic_tail_call_mode() == TailCallMode::kAllow) {
2134 os << ", JSTailCall";
2135 }
2136 return os;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002137}
2138
2139
2140static bool IsInteger32(double value) {
2141 if (value >= std::numeric_limits<int32_t>::min() &&
2142 value <= std::numeric_limits<int32_t>::max()) {
2143 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2144 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2145 }
2146 return false;
2147}
2148
2149
// Constructs the special hole-NaN constant: a double whose exact bit
// pattern is kHoleNanInt64. memcpy is used so the precise NaN payload is
// written, not a canonicalized NaN.
HConstant::HConstant(Special special)
    : HTemplateInstruction<0>(HType::TaggedNumber()),
      object_(Handle<Object>::null()),
      object_map_(Handle<Map>::null()),
      bit_field_(HasDoubleValueField::encode(true) |
                 InstanceTypeField::encode(kUnknownInstanceType)),
      int32_value_(0) {
  DCHECK_EQ(kHoleNaN, special);
  std::memcpy(&double_value_, &kHoleNanInt64, sizeof(double_value_));
  Initialize(Representation::Double());
}
2161
2162
// Builds a constant from a heap handle, eagerly caching in bit_field_
// everything later phases ask about: boolean value, instance type,
// undetectable/callable bits, map (only when stable), and any numeric
// value in int32/smi/double forms.
HConstant::HConstant(Handle<Object> object, Representation r)
    : HTemplateInstruction<0>(HType::FromValue(object)),
      object_(Unique<Object>::CreateUninitialized(object)),
      object_map_(Handle<Map>::null()),
      bit_field_(
          HasStableMapValueField::encode(false) |
          HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
          HasDoubleValueField::encode(false) |
          HasExternalReferenceValueField::encode(false) |
          IsNotInNewSpaceField::encode(true) |
          BooleanValueField::encode(object->BooleanValue()) |
          IsUndetectableField::encode(false) | IsCallableField::encode(false) |
          InstanceTypeField::encode(kUnknownInstanceType)) {
  if (object->IsHeapObject()) {
    // Cache map-derived facts; the map itself is remembered only if it is
    // stable.
    Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
    Isolate* isolate = heap_object->GetIsolate();
    Handle<Map> map(heap_object->map(), isolate);
    bit_field_ = IsNotInNewSpaceField::update(
        bit_field_, !isolate->heap()->InNewSpace(*object));
    bit_field_ = InstanceTypeField::update(bit_field_, map->instance_type());
    bit_field_ =
        IsUndetectableField::update(bit_field_, map->is_undetectable());
    bit_field_ = IsCallableField::update(bit_field_, map->is_callable());
    if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map);
    bit_field_ = HasStableMapValueField::update(
        bit_field_,
        HasMapValue() && Handle<Map>::cast(heap_object)->is_stable());
  }
  if (object->IsNumber()) {
    // Cache the numeric value in every representation it fits exactly.
    double n = object->Number();
    bool has_int32_value = IsInteger32(n);
    bit_field_ = HasInt32ValueField::update(bit_field_, has_int32_value);
    int32_value_ = DoubleToInt32(n);
    bit_field_ = HasSmiValueField::update(
        bit_field_, has_int32_value && Smi::IsValid(int32_value_));
    double_value_ = n;
    bit_field_ = HasDoubleValueField::update(bit_field_, true);
  }

  Initialize(r);
}
2204
2205
// Re-creation constructor: all cached facts are supplied by the caller
// instead of being derived from the object (used by
// CopyToRepresentation for tagged constants).
HConstant::HConstant(Unique<Object> object, Unique<Map> object_map,
                     bool has_stable_map_value, Representation r, HType type,
                     bool is_not_in_new_space, bool boolean_value,
                     bool is_undetectable, InstanceType instance_type)
    : HTemplateInstruction<0>(type),
      object_(object),
      object_map_(object_map),
      bit_field_(HasStableMapValueField::encode(has_stable_map_value) |
                 HasSmiValueField::encode(false) |
                 HasInt32ValueField::encode(false) |
                 HasDoubleValueField::encode(false) |
                 HasExternalReferenceValueField::encode(false) |
                 IsNotInNewSpaceField::encode(is_not_in_new_space) |
                 BooleanValueField::encode(boolean_value) |
                 IsUndetectableField::encode(is_undetectable) |
                 InstanceTypeField::encode(instance_type)) {
  DCHECK(!object.handle().is_null());
  DCHECK(!type.IsTaggedNumber() || type.IsNone());
  Initialize(r);
}
2226
2227
// Integer constant: the double view is derived via FastI2D, and the type
// is Smi only when the value fits and no pre-existing heap object could
// back it.
HConstant::HConstant(int32_t integer_value, Representation r,
                     bool is_not_in_new_space, Unique<Object> object)
    : object_(object),
      object_map_(Handle<Map>::null()),
      bit_field_(HasStableMapValueField::encode(false) |
                 HasSmiValueField::encode(Smi::IsValid(integer_value)) |
                 HasInt32ValueField::encode(true) |
                 HasDoubleValueField::encode(true) |
                 HasExternalReferenceValueField::encode(false) |
                 IsNotInNewSpaceField::encode(is_not_in_new_space) |
                 BooleanValueField::encode(integer_value != 0) |
                 IsUndetectableField::encode(false) |
                 InstanceTypeField::encode(kUnknownInstanceType)),
      int32_value_(integer_value),
      double_value_(FastI2D(integer_value)) {
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber. See crbug.com/349878.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = HasSmiValue() && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
2250
2251
// Double constant: the int32/smi views are cached only when the double is
// an exact int32 (IsInteger32 compares bit-for-bit). BooleanValue follows
// ToBoolean for numbers: false for 0 and NaN.
HConstant::HConstant(double double_value, Representation r,
                     bool is_not_in_new_space, Unique<Object> object)
    : object_(object),
      object_map_(Handle<Map>::null()),
      bit_field_(HasStableMapValueField::encode(false) |
                 HasInt32ValueField::encode(IsInteger32(double_value)) |
                 HasDoubleValueField::encode(true) |
                 HasExternalReferenceValueField::encode(false) |
                 IsNotInNewSpaceField::encode(is_not_in_new_space) |
                 BooleanValueField::encode(double_value != 0 &&
                                           !std::isnan(double_value)) |
                 IsUndetectableField::encode(false) |
                 InstanceTypeField::encode(kUnknownInstanceType)),
      int32_value_(DoubleToInt32(double_value)),
      double_value_(double_value) {
  bit_field_ = HasSmiValueField::update(
      bit_field_, HasInteger32Value() && Smi::IsValid(int32_value_));
  // It's possible to create a constant with a value in Smi-range but stored
  // in a (pre-existing) HeapNumber. See crbug.com/349878.
  bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
  bool is_smi = HasSmiValue() && !could_be_heapobject;
  set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}
2276
2277
// External-reference constant (an address outside the JS heap); always
// uses External representation and has no numeric or map views.
HConstant::HConstant(ExternalReference reference)
    : HTemplateInstruction<0>(HType::Any()),
      object_(Unique<Object>(Handle<Object>::null())),
      object_map_(Handle<Map>::null()),
      bit_field_(
          HasStableMapValueField::encode(false) |
          HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
          HasDoubleValueField::encode(false) |
          HasExternalReferenceValueField::encode(true) |
          IsNotInNewSpaceField::encode(true) | BooleanValueField::encode(true) |
          IsUndetectableField::encode(false) |
          InstanceTypeField::encode(kUnknownInstanceType)),
      external_reference_value_(reference) {
  Initialize(Representation::External());
}
2293
2294
// Finalizes construction: when no representation was requested, picks the
// most specific one the cached values support (Smi > Int32 > Double >
// External > Tagged), then normalizes state for the chosen representation.
void HConstant::Initialize(Representation r) {
  if (r.IsNone()) {
    if (HasSmiValue() && SmiValuesAre31Bits()) {
      r = Representation::Smi();
    } else if (HasInteger32Value()) {
      r = Representation::Integer32();
    } else if (HasDoubleValue()) {
      r = Representation::Double();
    } else if (HasExternalReferenceValue()) {
      r = Representation::External();
    } else {
      Handle<Object> object = object_.handle();
      if (object->IsJSObject()) {
        // Try to eagerly migrate JSObjects that have deprecated maps.
        Handle<JSObject> js_object = Handle<JSObject>::cast(object);
        if (js_object->map()->is_deprecated()) {
          JSObject::TryMigrateInstance(js_object);
        }
      }
      r = Representation::Tagged();
    }
  }
  if (r.IsSmi()) {
    // If we have an existing handle, zap it, because it might be a heap
    // number which we must not re-use when copying this HConstant to
    // Tagged representation later, because having Smi representation now
    // could cause heap object checks not to get emitted.
    object_ = Unique<Object>(Handle<Object>::null());
  }
  if (r.IsSmiOrInteger32() && object_.handle().is_null()) {
    // If it's not a heap object, it can't be in new space.
    bit_field_ = IsNotInNewSpaceField::update(bit_field_, true);
  }
  set_representation(r);
  SetFlag(kUseGVN);
}
2331
2332
// Returns true when the constant is one of the known immortal, immovable
// heap values: an immortal-immovable root, a listed internalized string,
// or a string map. Numeric and external constants never qualify. The
// macros below expand to one IsKnownGlobal check per candidate, OR-ed
// into a single boolean expression ending in "false".
bool HConstant::ImmortalImmovable() const {
  if (HasInteger32Value()) {
    return false;
  }
  if (HasDoubleValue()) {
    if (IsSpecialDouble()) {
      return true;
    }
    return false;
  }
  if (HasExternalReferenceValue()) {
    return false;
  }

  DCHECK(!object_.handle().is_null());
  Heap* heap = isolate()->heap();
  DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
  DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
  return
#define IMMORTAL_IMMOVABLE_ROOT(name) \
  object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
      IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
#undef IMMORTAL_IMMOVABLE_ROOT
#define INTERNALIZED_STRING(name, value) \
  object_.IsKnownGlobal(heap->name()) ||
      INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
#undef INTERNALIZED_STRING
#define STRING_TYPE(NAME, size, name, Name) \
  object_.IsKnownGlobal(heap->name##_map()) ||
      STRING_TYPE_LIST(STRING_TYPE)
#undef STRING_TYPE
      false;
}
2366
2367
2368bool HConstant::EmitAtUses() {
2369 DCHECK(IsLinked());
2370 if (block()->graph()->has_osr() &&
2371 block()->graph()->IsStandardConstant(this)) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002372 return true;
2373 }
2374 if (HasNoUses()) return true;
2375 if (IsCell()) return false;
2376 if (representation().IsDouble()) return false;
2377 if (representation().IsExternal()) return false;
2378 return true;
2379}
2380
2381
// Copies this constant into representation |r|, or returns NULL when the
// constant has no value in that representation. The int32 branch is
// tested before the double branch, so an int32-valued constant is copied
// through the integer constructor even when it also has a double view.
HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
  if (r.IsSmi() && !HasSmiValue()) return NULL;
  if (r.IsInteger32() && !HasInteger32Value()) return NULL;
  if (r.IsDouble() && !HasDoubleValue()) return NULL;
  if (r.IsExternal() && !HasExternalReferenceValue()) return NULL;
  if (HasInteger32Value()) {
    return new (zone) HConstant(int32_value_, r, NotInNewSpace(), object_);
  }
  if (HasDoubleValue()) {
    return new (zone) HConstant(double_value_, r, NotInNewSpace(), object_);
  }
  if (HasExternalReferenceValue()) {
    return new(zone) HConstant(external_reference_value_);
  }
  DCHECK(!object_.handle().is_null());
  return new (zone) HConstant(object_, object_map_, HasStableMapValue(), r,
                              type_, NotInNewSpace(), BooleanValue(),
                              IsUndetectable(), GetInstanceType());
}
2401
2402
2403Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2404 HConstant* res = NULL;
2405 if (HasInteger32Value()) {
2406 res = new (zone) HConstant(int32_value_, Representation::Integer32(),
2407 NotInNewSpace(), object_);
2408 } else if (HasDoubleValue()) {
2409 res = new (zone)
2410 HConstant(DoubleToInt32(double_value_), Representation::Integer32(),
2411 NotInNewSpace(), object_);
2412 }
2413 return res != NULL ? Just(res) : Nothing<HConstant*>();
2414}
2415
2416
// Folds ToNumber for constants it can handle: true/false -> 1/0,
// undefined -> NaN, null -> 0, strings via String::ToNumber. Anything
// else yields Nothing so the caller keeps the runtime conversion.
Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Isolate* isolate,
                                                   Zone* zone) {
  HConstant* res = NULL;
  Handle<Object> handle = this->handle(isolate);
  if (handle->IsBoolean()) {
    res = handle->BooleanValue() ?
      new(zone) HConstant(1) : new(zone) HConstant(0);
  } else if (handle->IsUndefined(isolate)) {
    res = new (zone) HConstant(std::numeric_limits<double>::quiet_NaN());
  } else if (handle->IsNull(isolate)) {
    res = new(zone) HConstant(0);
  } else if (handle->IsString()) {
    res = new(zone) HConstant(String::ToNumber(Handle<String>::cast(handle)));
  }
  return res != NULL ? Just(res) : Nothing<HConstant*>();
}
2433
2434
// Prints the most specific cached value (int32, double, external address)
// or, for tagged constants, a brief description of the heap object plus
// map annotations; "[new space]" is appended when applicable.
std::ostream& HConstant::PrintDataTo(std::ostream& os) const {  // NOLINT
  if (HasInteger32Value()) {
    os << int32_value_ << " ";
  } else if (HasDoubleValue()) {
    os << double_value_ << " ";
  } else if (HasExternalReferenceValue()) {
    os << reinterpret_cast<void*>(external_reference_value_.address()) << " ";
  } else {
    // The handle() method is silently and lazily mutating the object.
    Handle<Object> h = const_cast<HConstant*>(this)->handle(isolate());
    os << Brief(*h) << " ";
    if (HasStableMapValue()) os << "[stable-map] ";
    if (HasObjectMap()) os << "[map " << *ObjectMap().handle() << "] ";
  }
  if (!NotInNewSpace()) os << "[new space] ";
  return os;
}
2452
2453
2454std::ostream& HBinaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
2455 os << NameOf(left()) << " " << NameOf(right());
2456 if (CheckFlag(kCanOverflow)) os << " !";
2457 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
2458 return os;
2459}
2460
2461
// Infers this operation's representation from inputs, use requirements,
// and either use feedback or recorded output feedback, in that order.
void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");

  // A Smi result with a non-Smi use must widen to Integer32.
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }

  if (observed_output_representation_.IsNone()) {
    new_rep = RepresentationFromUses();
    UpdateRepresentation(new_rep, h_infer, "uses");
  } else {
    new_rep = RepresentationFromOutput();
    UpdateRepresentation(new_rep, h_infer, "output");
  }
}
2480
2481
2482Representation HBinaryOperation::RepresentationFromInputs() {
2483 // Determine the worst case of observed input representations and
2484 // the currently assumed output representation.
2485 Representation rep = representation();
2486 for (int i = 1; i <= 2; ++i) {
2487 rep = rep.generalize(observed_input_representation(i));
2488 }
2489 // If any of the actual input representation is more general than what we
2490 // have so far but not Tagged, use that representation instead.
2491 Representation left_rep = left()->representation();
2492 Representation right_rep = right()->representation();
2493 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
2494 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
2495
2496 return rep;
2497}
2498
2499
// Returns true when recorded output feedback may be discarded because all
// uses truncate the result to the current Smi/Int32 representation
// anyway. A multiplication only qualifies when it is a multiplication by
// -1 (see the inline note below).
bool HBinaryOperation::IgnoreObservedOutputRepresentation(
    Representation current_rep) {
  return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
          (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
         // Mul in Integer32 mode would be too precise.
         (!this->IsMul() || HMul::cast(this)->MulMinusOne());
}
2507
2508
// Returns the recorded output representation when it is strictly more
// general than the current one and may not be ignored; otherwise None.
Representation HBinaryOperation::RepresentationFromOutput() {
  Representation rep = representation();
  // Consider observed output representation, but ignore it if it's Double,
  // this instruction is not a division, and all its uses are truncating
  // to Integer32.
  if (observed_output_representation_.is_more_general_than(rep) &&
      !IgnoreObservedOutputRepresentation(rep)) {
    return observed_output_representation_;
  }
  return Representation::None();
}
2520
2521
// Forces representation |r| on this operation and records it as the
// observed feedback for both operands.
void HBinaryOperation::AssumeRepresentation(Representation r) {
  set_observed_input_representation(1, r);
  set_observed_input_representation(2, r);
  HValue::AssumeRepresentation(r);
}
2527
2528
// Min/max takes its representation from the inputs only; use feedback is
// deliberately ignored.
void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  // Do not care about uses.
}
2535
2536
// Infers a result range for bitwise operations. XOR with known operand
// ranges gets a power-of-two bound derived from the highest bit any
// operand can have set; AND/OR fall back to the operands' bit masks.
Range* HBitwise::InferRange(Zone* zone) {
  if (op() == Token::BIT_XOR) {
    if (left()->HasRange() && right()->HasRange()) {
      // The maximum value has the high bit, and all bits below, set:
      // (1 << high) - 1.
      // If the range can be negative, the minimum int is a negative number with
      // the high bit, and all bits below, unset:
      // -(1 << high).
      // If it cannot be negative, conservatively choose 0 as minimum int.
      int64_t left_upper = left()->range()->upper();
      int64_t left_lower = left()->range()->lower();
      int64_t right_upper = right()->range()->upper();
      int64_t right_lower = right()->range()->lower();

      // Fold negative bounds to their complement so the significant-bit
      // computation below sees only non-negative values.
      if (left_upper < 0) left_upper = ~left_upper;
      if (left_lower < 0) left_lower = ~left_lower;
      if (right_upper < 0) right_upper = ~right_upper;
      if (right_lower < 0) right_lower = ~right_lower;

      int high = MostSignificantBit(
          static_cast<uint32_t>(
              left_upper | left_lower | right_upper | right_lower));

      int64_t limit = 1;
      limit <<= high;
      int32_t min = (left()->range()->CanBeNegative() ||
                     right()->range()->CanBeNegative())
                    ? static_cast<int32_t>(-limit) : 0;
      return new(zone) Range(min, static_cast<int32_t>(limit - 1));
    }
    Range* result = HValue::InferRange(zone);
    result->set_can_be_minus_zero(false);
    return result;
  }
  // AND/OR: combine the operands' masks; a non-negative mask directly
  // bounds the result.
  const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
  int32_t left_mask = (left()->range() != NULL)
      ? left()->range()->Mask()
      : kDefaultMask;
  int32_t right_mask = (right()->range() != NULL)
      ? right()->range()->Mask()
      : kDefaultMask;
  int32_t result_mask = (op() == Token::BIT_AND)
      ? left_mask & right_mask
      : left_mask | right_mask;
  if (result_mask >= 0) return new(zone) Range(0, result_mask);

  Range* result = HValue::InferRange(zone);
  result->set_can_be_minus_zero(false);
  return result;
}
2587
2588
2589Range* HSar::InferRange(Zone* zone) {
2590 if (right()->IsConstant()) {
2591 HConstant* c = HConstant::cast(right());
2592 if (c->HasInteger32Value()) {
2593 Range* result = (left()->range() != NULL)
2594 ? left()->range()->Copy(zone)
2595 : new(zone) Range();
2596 result->Sar(c->Integer32Value());
2597 return result;
2598 }
2599 }
2600 return HValue::InferRange(zone);
2601}
2602
2603
// Logical right shift by a constant. For possibly-negative inputs the
// result is only bounded when the shift count is >= 1 (so it fits int32);
// for non-negative inputs logical and arithmetic shifts agree, so the
// range is shifted with Sar.
Range* HShr::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      int shift_count = c->Integer32Value() & 0x1f;
      if (left()->range()->CanBeNegative()) {
        // Only compute bounds if the result always fits into an int32.
        return (shift_count >= 1)
            ? new(zone) Range(0,
                              static_cast<uint32_t>(0xffffffff) >> shift_count)
            : new(zone) Range();
      } else {
        // For positive inputs we can use the >> operator.
        Range* result = (left()->range() != NULL)
            ? left()->range()->Copy(zone)
            : new(zone) Range();
        result->Sar(c->Integer32Value());
        return result;
      }
    }
  }
  return HValue::InferRange(zone);
}
2627
2628
2629Range* HShl::InferRange(Zone* zone) {
2630 if (right()->IsConstant()) {
2631 HConstant* c = HConstant::cast(right());
2632 if (c->HasInteger32Value()) {
2633 Range* result = (left()->range() != NULL)
2634 ? left()->range()->Copy(zone)
2635 : new(zone) Range();
2636 result->Shl(c->Integer32Value());
2637 return result;
2638 }
2639 }
2640 return HValue::InferRange(zone);
2641}
2642
2643
2644Range* HLoadNamedField::InferRange(Zone* zone) {
2645 if (access().representation().IsInteger8()) {
2646 return new(zone) Range(kMinInt8, kMaxInt8);
2647 }
2648 if (access().representation().IsUInteger8()) {
2649 return new(zone) Range(kMinUInt8, kMaxUInt8);
2650 }
2651 if (access().representation().IsInteger16()) {
2652 return new(zone) Range(kMinInt16, kMaxInt16);
2653 }
2654 if (access().representation().IsUInteger16()) {
2655 return new(zone) Range(kMinUInt16, kMaxUInt16);
2656 }
2657 if (access().IsStringLength()) {
2658 return new(zone) Range(0, String::kMaxLength);
2659 }
2660 return HValue::InferRange(zone);
2661}
2662
2663
2664Range* HLoadKeyed::InferRange(Zone* zone) {
2665 switch (elements_kind()) {
2666 case INT8_ELEMENTS:
2667 return new(zone) Range(kMinInt8, kMaxInt8);
2668 case UINT8_ELEMENTS:
2669 case UINT8_CLAMPED_ELEMENTS:
2670 return new(zone) Range(kMinUInt8, kMaxUInt8);
2671 case INT16_ELEMENTS:
2672 return new(zone) Range(kMinInt16, kMaxInt16);
2673 case UINT16_ELEMENTS:
2674 return new(zone) Range(kMinUInt16, kMaxUInt16);
2675 default:
2676 return HValue::InferRange(zone);
2677 }
2678}
2679
2680
2681std::ostream& HCompareGeneric::PrintDataTo(std::ostream& os) const { // NOLINT
2682 os << Token::Name(token()) << " ";
2683 return HBinaryOperation::PrintDataTo(os);
2684}
2685
2686
2687std::ostream& HStringCompareAndBranch::PrintDataTo(
2688 std::ostream& os) const { // NOLINT
2689 os << Token::Name(token()) << " ";
2690 return HControlInstruction::PrintDataTo(os);
2691}
2692
2693
2694std::ostream& HCompareNumericAndBranch::PrintDataTo(
2695 std::ostream& os) const { // NOLINT
2696 os << Token::Name(token()) << " " << NameOf(left()) << " " << NameOf(right());
2697 return HControlInstruction::PrintDataTo(os);
2698}
2699
2700
2701std::ostream& HCompareObjectEqAndBranch::PrintDataTo(
2702 std::ostream& os) const { // NOLINT
2703 os << NameOf(left()) << " " << NameOf(right());
2704 return HControlInstruction::PrintDataTo(os);
2705}
2706
2707
// Tries to statically resolve the branch: first via a previously recorded
// known successor, then by constant-folding two constant operands.
// Returns false (and *block = NULL) when the branch stays dynamic.
bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (known_successor_index() != kNoKnownSuccessorIndex) {
    *block = SuccessorAt(known_successor_index());
    return true;
  }
  if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
    *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
2721
2722
// Tries to statically resolve the is-string branch using a recorded
// successor, constant folding, or the value's static type. Values typed
// as smi/null/boolean/undefined/JSReceiver are known not to be strings.
bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (known_successor_index() != kNoKnownSuccessorIndex) {
    *block = SuccessorAt(known_successor_index());
    return true;
  }
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->HasStringValue()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  if (value()->type().IsString()) {
    *block = FirstSuccessor();
    return true;
  }
  if (value()->type().IsSmi() ||
      value()->type().IsNull() ||
      value()->type().IsBoolean() ||
      value()->type().IsUndefined() ||
      value()->type().IsJSReceiver()) {
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
2748
2749
// Tries to statically resolve the is-undetectable branch: constants carry
// their undetectable bit; null/undefined take the true branch, while
// boolean/smi/string/JSReceiver-typed values take the false branch.
bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->IsUndetectable()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  // Null and undefined are treated as undetectable here.
  if (value()->type().IsNull() || value()->type().IsUndefined()) {
    *block = FirstSuccessor();
    return true;
  }
  if (value()->type().IsBoolean() ||
      value()->type().IsSmi() ||
      value()->type().IsString() ||
      value()->type().IsJSReceiver()) {
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}
2770
2771
2772bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2773 if (FLAG_fold_constants && value()->IsConstant()) {
2774 InstanceType type = HConstant::cast(value())->GetInstanceType();
2775 *block = (from_ <= type) && (type <= to_)
2776 ? FirstSuccessor() : SecondSuccessor();
2777 return true;
2778 }
2779 *block = NULL;
2780 return false;
2781}
2782
2783
// The hole check simply adopts its input's representation; the inference
// phase state (h_infer) is not consulted.
void HCompareHoleAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}
2788
2789
2790bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2791 if (left() == right() &&
2792 left()->representation().IsSmiOrInteger32()) {
2793 *block = (token() == Token::EQ ||
2794 token() == Token::EQ_STRICT ||
2795 token() == Token::LTE ||
2796 token() == Token::GTE)
2797 ? FirstSuccessor() : SecondSuccessor();
2798 return true;
2799 }
2800 *block = NULL;
2801 return false;
2802}
2803
2804
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002805std::ostream& HGoto::PrintDataTo(std::ostream& os) const { // NOLINT
2806 return os << *SuccessorAt(0);
2807}
2808
2809
// Chooses the comparison representation from observed and actual operand
// representations: Smi/Int32 feedback stays integral (widened by actual
// non-Tagged inputs), anything else compares as Double. Equality-style
// double comparisons must deopt on undefined (see comment below).
void HCompareNumericAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  Representation observed_left = observed_input_representation(0);
  Representation observed_right = observed_input_representation(1);

  Representation rep = Representation::None();
  rep = rep.generalize(observed_left);
  rep = rep.generalize(observed_right);
  if (rep.IsNone() || rep.IsSmiOrInteger32()) {
    if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
    if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
  } else {
    rep = Representation::Double();
  }

  if (rep.IsDouble()) {
    // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
    // and !=) have special handling of undefined, e.g. undefined == undefined
    // is 'true'. Relational comparisons have a different semantic, first
    // calling ToPrimitive() on their arguments. The standard Crankshaft
    // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
    // inputs are doubles caused 'undefined' to be converted to NaN. That's
    // compatible out-of-the box with ordered relational comparisons (<, >, <=,
    // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
    // it is not consistent with the spec. For example, it would cause undefined
    // == undefined (should be true) to be evaluated as NaN == NaN
    // (false). Therefore, any comparisons other than ordered relational
    // comparisons must cause a deopt when one of their arguments is undefined.
    // See also v8:1434
    if (Token::IsOrderedRelationalCompareOp(token_)) {
      SetFlag(kAllowUndefinedAsNaN);
    }
  }
  ChangeRepresentation(rep);
}
2847
2848
2849std::ostream& HParameter::PrintDataTo(std::ostream& os) const { // NOLINT
2850 return os << index();
2851}
2852
2853
2854std::ostream& HLoadNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
2855 os << NameOf(object()) << access_;
2856
2857 if (maps() != NULL) {
2858 os << " [" << *maps()->at(0).handle();
2859 for (int i = 1; i < maps()->size(); ++i) {
2860 os << "," << *maps()->at(i).handle();
2861 }
2862 os << "]";
2863 }
2864
2865 if (HasDependency()) os << " " << NameOf(dependency());
2866 return os;
2867}
2868
2869
2870std::ostream& HLoadNamedGeneric::PrintDataTo(
2871 std::ostream& os) const { // NOLINT
2872 Handle<String> n = Handle<String>::cast(name());
2873 return os << NameOf(object()) << "." << n->ToCString().get();
2874}
2875
2876
// Prints "elements[.kind][key + base_offset]" plus dependency and
// hole-check annotations; the element kind is printed only for fixed
// typed-array loads.
std::ostream& HLoadKeyed::PrintDataTo(std::ostream& os) const {  // NOLINT
  if (!is_fixed_typed_array()) {
    os << NameOf(elements());
  } else {
    DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
    os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
  }

  os << "[" << NameOf(key());
  if (IsDehoisted()) os << " + " << base_offset();
  os << "]";

  if (HasDependency()) os << " " << NameOf(dependency());
  if (RequiresHoleCheck()) os << " check_hole";
  return os;
}
2894
2895
// Attempts to add |increase_by_value| to the stored base offset; returns
// false when the checked addition overflows or the sum no longer fits the
// bit field, leaving the offset unchanged.
bool HLoadKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
  // The base offset is usually simply the size of the array header, except
  // when dehoisting adds an additional offset due to an array index key
  // manipulation, in which case it becomes (array header size +
  // constant-offset-from-key * kPointerSize)
  uint32_t base_offset = BaseOffsetField::decode(bit_field_);
  v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset;
  addition_result += increase_by_value;
  if (!addition_result.IsValid()) return false;
  base_offset = addition_result.ValueOrDie();
  if (!BaseOffsetField::is_valid(base_offset)) return false;
  bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
  return true;
}
2910
2911
// Returns true when every use of this load can cope with receiving the
// hole directly (i.e. no hole check is needed on the load itself).
bool HLoadKeyed::UsesMustHandleHole() const {
  // Packed and typed-array backing stores never contain holes.
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  if (IsFixedTypedArrayElementsKind(elements_kind())) {
    return false;
  }

  if (hole_mode() == ALLOW_RETURN_HOLE) {
    // For double arrays the hole may escape only if all uses accept
    // undefined-as-NaN semantics.
    if (IsFastDoubleElementsKind(elements_kind())) {
      return AllUsesCanTreatHoleAsNaN();
    }
    return true;
  }

  if (IsFastDoubleElementsKind(elements_kind())) {
    return false;
  }

  // Holes are only returned as tagged values.
  if (!representation().IsTagged()) {
    return false;
  }

  // Only HChange uses are known to tolerate the hole value here.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return false;
  }

  return true;
}
2944
2945
// True when this is a double-array load and every use carries the
// kAllowUndefinedAsNaN flag (a hole read from a double array surfaces as
// a NaN bit pattern).
bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
  return IsFastDoubleElementsKind(elements_kind()) &&
      CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
}
2950
2951
2952bool HLoadKeyed::RequiresHoleCheck() const {
2953 if (IsFastPackedElementsKind(elements_kind())) {
2954 return false;
2955 }
2956
2957 if (IsFixedTypedArrayElementsKind(elements_kind())) {
2958 return false;
2959 }
2960
2961 if (hole_mode() == CONVERT_HOLE_TO_UNDEFINED) {
2962 return false;
2963 }
2964
2965 return !UsesMustHandleHole();
2966}
2967
2968
2969std::ostream& HLoadKeyedGeneric::PrintDataTo(
2970 std::ostream& os) const { // NOLINT
2971 return os << NameOf(object()) << "[" << NameOf(key()) << "]";
2972}
2973
2974
HValue* HLoadKeyedGeneric::Canonicalize() {
  // Recognize generic keyed loads that use property name generated
  // by for-in statement as a key and rewrite them into fast property load
  // by index.
  if (key()->IsLoadKeyed()) {
    HLoadKeyed* key_load = HLoadKeyed::cast(key());
    if (key_load->elements()->IsForInCacheArray()) {
      HForInCacheArray* names_cache =
          HForInCacheArray::cast(key_load->elements());

      if (names_cache->enumerable() == object()) {
        // The key is read out of the for-in name cache of this very object:
        // replace the generic load with a map check plus a field load by the
        // index taken from the parallel index cache.
        HForInCacheArray* index_cache =
            names_cache->index_cache();
        // Guard against the object having changed maps since the for-in
        // caches were populated.
        HCheckMapValue* map_check = HCheckMapValue::New(
            block()->graph()->isolate(), block()->graph()->zone(),
            block()->graph()->GetInvalidContext(), object(),
            names_cache->map());
        // Load the field index from the index cache with the same key.
        HInstruction* index = HLoadKeyed::New(
            block()->graph()->isolate(), block()->graph()->zone(),
            block()->graph()->GetInvalidContext(), index_cache, key_load->key(),
            key_load->key(), nullptr, key_load->elements_kind());
        map_check->InsertBefore(this);
        index->InsertBefore(this);
        // The fast by-index load replaces this generic load.
        return Prepend(new(block()->zone()) HLoadFieldByIndex(
            object(), index));
      }
    }
  }

  return this;
}
3006
3007
3008std::ostream& HStoreNamedGeneric::PrintDataTo(
3009 std::ostream& os) const { // NOLINT
3010 Handle<String> n = Handle<String>::cast(name());
3011 return os << NameOf(object()) << "." << n->ToCString().get() << " = "
3012 << NameOf(value());
3013}
3014
3015
3016std::ostream& HStoreNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
3017 os << NameOf(object()) << access_ << " = " << NameOf(value());
3018 if (NeedsWriteBarrier()) os << " (write-barrier)";
3019 if (has_transition()) os << " (transition map " << *transition_map() << ")";
3020 return os;
3021}
3022
3023
3024std::ostream& HStoreKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
3025 if (!is_fixed_typed_array()) {
3026 os << NameOf(elements());
3027 } else {
3028 DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
3029 elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
3030 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3031 }
3032
3033 os << "[" << NameOf(key());
3034 if (IsDehoisted()) os << " + " << base_offset();
3035 return os << "] = " << NameOf(value());
3036}
3037
3038
3039std::ostream& HStoreKeyedGeneric::PrintDataTo(
3040 std::ostream& os) const { // NOLINT
3041 return os << NameOf(object()) << "[" << NameOf(key())
3042 << "] = " << NameOf(value());
3043}
3044
3045
3046std::ostream& HTransitionElementsKind::PrintDataTo(
3047 std::ostream& os) const { // NOLINT
3048 os << NameOf(object());
3049 ElementsKind from_kind = original_map().handle()->elements_kind();
3050 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3051 os << " " << *original_map().handle() << " ["
3052 << ElementsAccessor::ForKind(from_kind)->name() << "] -> "
3053 << *transitioned_map().handle() << " ["
3054 << ElementsAccessor::ForKind(to_kind)->name() << "]";
3055 if (IsSimpleMapChangeTransition(from_kind, to_kind)) os << " (simple)";
3056 return os;
3057}
3058
3059
3060std::ostream& HLoadGlobalGeneric::PrintDataTo(
3061 std::ostream& os) const { // NOLINT
3062 return os << name()->ToCString().get() << " ";
3063}
3064
3065
3066std::ostream& HInnerAllocatedObject::PrintDataTo(
3067 std::ostream& os) const { // NOLINT
3068 os << NameOf(base_object()) << " offset ";
3069 return offset()->PrintTo(os);
3070}
3071
3072
3073std::ostream& HLoadContextSlot::PrintDataTo(std::ostream& os) const { // NOLINT
3074 return os << NameOf(value()) << "[" << slot_index() << "]";
3075}
3076
3077
3078std::ostream& HStoreContextSlot::PrintDataTo(
3079 std::ostream& os) const { // NOLINT
3080 return os << NameOf(context()) << "[" << slot_index()
3081 << "] = " << NameOf(value());
3082}
3083
3084
3085// Implementation of type inference and type conversions. Calculates
3086// the inferred type of this instruction based on the input operands.
3087
HType HValue::CalculateInferredType() {
  // Default implementation: the statically assigned type is already final.
  return type_;
}
3091
3092
3093HType HPhi::CalculateInferredType() {
3094 if (OperandCount() == 0) return HType::Tagged();
3095 HType result = OperandAt(0)->type();
3096 for (int i = 1; i < OperandCount(); ++i) {
3097 HType current = OperandAt(i)->type();
3098 result = result.Combine(current);
3099 }
3100 return result;
3101}
3102
3103
HType HChange::CalculateInferredType() {
  // Tagging a double value always materializes a heap number.
  if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
  return type();
}
3108
3109
3110Representation HUnaryMathOperation::RepresentationFromInputs() {
3111 if (SupportsFlexibleFloorAndRound() &&
3112 (op_ == kMathFloor || op_ == kMathRound)) {
3113 // Floor and Round always take a double input. The integral result can be
3114 // used as an integer or a double. Infer the representation from the uses.
3115 return Representation::None();
3116 }
3117 Representation rep = representation();
3118 // If any of the actual input representation is more general than what we
3119 // have so far but not Tagged, use that representation instead.
3120 Representation input_rep = value()->representation();
3121 if (!input_rep.IsTagged()) {
3122 rep = rep.generalize(input_rep);
3123 }
3124 return rep;
3125}
3126
3127
// Attempts to fold this allocation into a dominating allocation, so that both
// objects are carved out of one contiguous memory reservation. Returns true
// when the fold succeeded (this instruction may have been rewritten or even
// deleted). Only constant-size allocations in compatible spaces can fold.
bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
                                          HValue* dominator) {
  DCHECK(side_effect == kNewSpacePromotion);
  DCHECK(!IsAllocationFolded());
  Zone* zone = block()->zone();
  Isolate* isolate = block()->isolate();
  if (!FLAG_use_allocation_folding) return false;

  // Try to fold allocations together with their dominating allocations.
  if (!dominator->IsAllocate()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s)\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  // Check whether we are folding within the same block for local folding.
  if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  HAllocate* dominator_allocate = HAllocate::cast(dominator);
  HValue* dominator_size = dominator_allocate->size();
  HValue* current_size = size();

  // TODO(hpayer): Add support for non-constant allocation in dominator.
  if (!current_size->IsInteger32Constant() ||
      !dominator_size->IsInteger32Constant()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), "
             "dynamic allocation size in dominator\n",
          id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }


  // Both allocations must target the same space (new vs. old).
  if (!IsFoldable(dominator_allocate)) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
             Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  DCHECK(
      (IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
      (IsOldSpaceAllocation() && dominator_allocate->IsOldSpaceAllocation()));

  // First update the size of the dominator allocate instruction.
  dominator_size = dominator_allocate->size();
  int32_t original_object_size =
      HConstant::cast(dominator_size)->GetInteger32Constant();
  int32_t dominator_size_constant = original_object_size;

  // If this allocation needs double alignment, the dominator's size may have
  // to grow by a filler word so this object starts double-aligned.
  if (MustAllocateDoubleAligned()) {
    if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
      dominator_size_constant += kDoubleSize / 2;
    }
  }

  int32_t current_size_max_value = size()->GetInteger32Constant();
  int32_t new_dominator_size = dominator_size_constant + current_size_max_value;

  // Since we clear the first word after folded memory, we cannot use the
  // whole Page::kMaxRegularHeapObjectSize memory.
  if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
          id(), Mnemonic(), dominator_allocate->id(),
          dominator_allocate->Mnemonic(), new_dominator_size);
    }
    return false;
  }

  // Grow the dominator's reservation to cover this allocation as well.
  HInstruction* new_dominator_size_value = HConstant::CreateAndInsertBefore(
      isolate, zone, context(), new_dominator_size, Representation::None(),
      dominator_allocate);

  dominator_allocate->UpdateSize(new_dominator_size_value);

  // Alignment requirements are inherited by the grown dominator allocation.
  if (MustAllocateDoubleAligned()) {
    if (!dominator_allocate->MustAllocateDoubleAligned()) {
      dominator_allocate->MakeDoubleAligned();
    }
  }

  // If this allocation already heads a folding group, folding it away merges
  // its entire group into the new dominator; nothing else to insert.
  if (IsAllocationFoldingDominator()) {
    DeleteAndReplaceWith(dominator_allocate);
    if (FLAG_trace_allocation_folding) {
      PrintF(
          "#%d (%s) folded dominator into #%d (%s), new dominator size: %d\n",
          id(), Mnemonic(), dominator_allocate->id(),
          dominator_allocate->Mnemonic(), new_dominator_size);
    }
    return true;
  }

  // The first time anything folds into the dominator, split off a fresh
  // HAllocate that represents the dominator's original object inside the
  // grown reservation, and mark the dominator as the group's dominator.
  if (!dominator_allocate->IsAllocationFoldingDominator()) {
    HAllocate* first_alloc =
        HAllocate::New(isolate, zone, dominator_allocate->context(),
                       dominator_size, dominator_allocate->type(),
                       IsNewSpaceAllocation() ? NOT_TENURED : TENURED,
                       JS_OBJECT_TYPE, block()->graph()->GetConstant0());
    first_alloc->InsertAfter(dominator_allocate);
    dominator_allocate->ReplaceAllUsesWith(first_alloc);
    dominator_allocate->MakeAllocationFoldingDominator();
    first_alloc->MakeFoldedAllocation(dominator_allocate);
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) inserted for dominator #%d (%s)\n", first_alloc->id(),
             first_alloc->Mnemonic(), dominator_allocate->id(),
             dominator_allocate->Mnemonic());
    }
  }

  // This allocation now addresses memory inside the dominator's reservation.
  MakeFoldedAllocation(dominator_allocate);

  if (FLAG_trace_allocation_folding) {
    PrintF("#%d (%s) folded into #%d (%s), new dominator size: %d\n", id(),
           Mnemonic(), dominator_allocate->id(), dominator_allocate->Mnemonic(),
           new_dominator_size);
  }
  return true;
}
3257
3258
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003259std::ostream& HAllocate::PrintDataTo(std::ostream& os) const { // NOLINT
3260 os << NameOf(size()) << " (";
3261 if (IsNewSpaceAllocation()) os << "N";
3262 if (IsOldSpaceAllocation()) os << "P";
3263 if (MustAllocateDoubleAligned()) os << "A";
3264 if (MustPrefillWithFiller()) os << "F";
3265 return os << ")";
3266}
3267
3268
3269bool HStoreKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
3270 // The base offset is usually simply the size of the array header, except
3271 // with dehoisting adds an addition offset due to a array index key
3272 // manipulation, in which case it becomes (array header size +
3273 // constant-offset-from-key * kPointerSize)
3274 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset_;
3275 addition_result += increase_by_value;
3276 if (!addition_result.IsValid()) return false;
3277 base_offset_ = addition_result.ValueOrDie();
3278 return true;
3279}
3280
3281
3282bool HStoreKeyed::NeedsCanonicalization() {
3283 switch (value()->opcode()) {
3284 case kLoadKeyed: {
3285 ElementsKind load_kind = HLoadKeyed::cast(value())->elements_kind();
3286 return IsFixedFloatElementsKind(load_kind);
3287 }
3288 case kChange: {
3289 Representation from = HChange::cast(value())->from();
3290 return from.IsTagged() || from.IsHeapObject();
3291 }
3292 case kLoadNamedField:
3293 case kPhi: {
3294 // Better safe than sorry...
3295 return true;
3296 }
3297 default:
3298 return false;
3299 }
3300}
3301
3302
// Helpers for the constant-folding code below: wrap a C++ value into an
// HConstant with int32 resp. double representation. They rely on `isolate`,
// `zone` and `context` being in scope at the expansion site.
#define H_CONSTANT_INT(val) \
  HConstant::New(isolate, zone, context, static_cast<int32_t>(val))
#define H_CONSTANT_DOUBLE(val) \
  HConstant::New(isolate, zone, context, static_cast<double>(val))
3307
// Defines HInstr::New for the simple arithmetic instructions (+, *, -): when
// constant folding is enabled and both operands are number constants, the
// operation is evaluated at graph-build time, yielding an int32 constant when
// the double result is exactly representable as an int32, and a double
// constant otherwise.
#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context, \
                            HValue* left, HValue* right) { \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
      HConstant* c_left = HConstant::cast(left); \
      HConstant* c_right = HConstant::cast(right); \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
        double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
        if (IsInt32Double(double_res)) { \
          return H_CONSTANT_INT(double_res); \
        } \
        return H_CONSTANT_DOUBLE(double_res); \
      } \
    } \
    return new (zone) HInstr(context, left, right); \
  }

DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
3330
3331
// Creates an HStringAdd, folding the concatenation at graph-build time when
// both operands are constant strings and the combined length stays below
// String::kMaxLength (a longer result would require throwing at runtime).
HInstruction* HStringAdd::New(Isolate* isolate, Zone* zone, HValue* context,
                              HValue* left, HValue* right,
                              PretenureFlag pretenure_flag,
                              StringAddFlags flags,
                              Handle<AllocationSite> allocation_site) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_right = HConstant::cast(right);
    HConstant* c_left = HConstant::cast(left);
    if (c_left->HasStringValue() && c_right->HasStringValue()) {
      Handle<String> left_string = c_left->StringValue();
      Handle<String> right_string = c_right->StringValue();
      // Prevent possible exception by invalid string length.
      if (left_string->length() + right_string->length() < String::kMaxLength) {
        // Folding builds a cons string via the factory; this cannot fail for
        // lengths below the maximum, hence ToHandleChecked().
        MaybeHandle<String> concat = isolate->factory()->NewConsString(
            c_left->StringValue(), c_right->StringValue());
        return HConstant::New(isolate, zone, context, concat.ToHandleChecked());
      }
    }
  }
  return new (zone)
      HStringAdd(context, left, right, pretenure_flag, flags, allocation_site);
}
3354
3355
std::ostream& HStringAdd::PrintDataTo(std::ostream& os) const {  // NOLINT
  // Prints which operands are checked, then the generic operand data, then
  // the tenure flag in parentheses.
  if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
    os << "_CheckBoth";
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
    os << "_CheckLeft";
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
    os << "_CheckRight";
  }
  HBinaryOperation::PrintDataTo(os);
  os << " (";
  if (pretenure_flag() == NOT_TENURED)
    os << "N";
  else if (pretenure_flag() == TENURED)
    // NOTE(review): "D" for TENURED looks surprising ("T" would be expected)
    // — kept as-is since trace-parsing tooling may depend on this output.
    os << "D";
  return os << ")";
}
3372
3373
3374HInstruction* HStringCharFromCode::New(Isolate* isolate, Zone* zone,
3375 HValue* context, HValue* char_code) {
3376 if (FLAG_fold_constants && char_code->IsConstant()) {
3377 HConstant* c_code = HConstant::cast(char_code);
3378 if (c_code->HasNumberValue()) {
3379 if (std::isfinite(c_code->DoubleValue())) {
3380 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
3381 return HConstant::New(
3382 isolate, zone, context,
3383 isolate->factory()->LookupSingleCharacterStringFromCode(code));
3384 }
3385 return HConstant::New(isolate, zone, context,
3386 isolate->factory()->empty_string());
3387 }
3388 }
3389 return new(zone) HStringCharFromCode(context, char_code);
3390}
3391
3392
// Creates an HUnaryMathOperation, constant-folding the operation when the
// input is a known number. Infinities and NaN are folded per the special
// cases below; finite inputs are evaluated directly. The do/while(false)
// lets each failed folding precondition break to the generic instruction.
HInstruction* HUnaryMathOperation::New(Isolate* isolate, Zone* zone,
                                       HValue* context, HValue* value,
                                       BuiltinFunctionId op) {
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    if (std::isnan(d)) {  // NaN poisons everything.
      return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
    }
    if (std::isinf(d)) {  // +Infinity and -Infinity.
      switch (op) {
        case kMathCos:
        case kMathSin:
          // sin/cos of an infinity is NaN.
          return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
        case kMathExp:
          // exp(+inf) = +inf, exp(-inf) = 0.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
        case kMathLog:
        case kMathSqrt:
          // Only defined for +inf; -inf yields NaN.
          return H_CONSTANT_DOUBLE(
              (d > 0.0) ? d : std::numeric_limits<double>::quiet_NaN());
        case kMathPowHalf:
        case kMathAbs:
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
        case kMathRound:
        case kMathFround:
        case kMathFloor:
          // Infinities are already "integral".
          return H_CONSTANT_DOUBLE(d);
        case kMathClz32:
          // ToUint32(inf) is 0, which has 32 leading zeros.
          return H_CONSTANT_INT(32);
        default:
          UNREACHABLE();
          break;
      }
    }
    // Finite, non-NaN input: evaluate the operation directly.
    switch (op) {
      case kMathCos:
        return H_CONSTANT_DOUBLE(base::ieee754::cos(d));
      case kMathExp:
        return H_CONSTANT_DOUBLE(base::ieee754::exp(d));
      case kMathLog:
        return H_CONSTANT_DOUBLE(base::ieee754::log(d));
      case kMathSin:
        return H_CONSTANT_DOUBLE(base::ieee754::sin(d));
      case kMathSqrt:
        lazily_initialize_fast_sqrt(isolate);
        return H_CONSTANT_DOUBLE(fast_sqrt(d, isolate));
      case kMathPowHalf:
        return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
      case kMathAbs:
        // The "+ 0.0" normalizes -0.0 to +0.0.
        return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
      case kMathRound:
        // -0.5 .. -0.0 round to -0.0.
        if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significant * 2 ^ Exponent. If the
        // Exponent is not negative, the double value is already an integer.
        if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
        return H_CONSTANT_DOUBLE(Floor(d + 0.5));
      case kMathFround:
        // Round-trip through float32.
        return H_CONSTANT_DOUBLE(static_cast<double>(static_cast<float>(d)));
      case kMathFloor:
        return H_CONSTANT_DOUBLE(Floor(d));
      case kMathClz32: {
        uint32_t i = DoubleToUint32(d);
        return H_CONSTANT_INT(base::bits::CountLeadingZeros32(i));
      }
      default:
        UNREACHABLE();
        break;
    }
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}
3468
3469
Representation HUnaryMathOperation::RepresentationFromUses() {
  // Only Floor and Round have a flexible (int32-or-double) output; all other
  // operations use the default use-based inference.
  if (op_ != kMathFloor && op_ != kMathRound) {
    return HValue::RepresentationFromUses();
  }

  // The instruction can have an int32 or double output. Prefer a double
  // representation if there are double uses.
  bool use_double = false;

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    int use_index = it.index();
    Representation rep_observed = use->observed_input_representation(use_index);
    Representation rep_required = use->RequiredInputRepresentation(use_index);
    use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
    if (use_double && !FLAG_trace_representation) {
      // Having seen one double is enough.
      break;
    }
    // When tracing, keep iterating so every use gets logged.
    if (FLAG_trace_representation) {
      if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
        PrintF("#%d %s is used by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_observed.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      } else {
        PrintF("#%d %s is required by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_required.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      }
    }
  }
  return use_double ? Representation::Double() : Representation::Integer32();
}
3505
3506
3507HInstruction* HPower::New(Isolate* isolate, Zone* zone, HValue* context,
3508 HValue* left, HValue* right) {
3509 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3510 HConstant* c_left = HConstant::cast(left);
3511 HConstant* c_right = HConstant::cast(right);
3512 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3513 double result =
3514 power_helper(isolate, c_left->DoubleValue(), c_right->DoubleValue());
3515 return H_CONSTANT_DOUBLE(std::isnan(result)
3516 ? std::numeric_limits<double>::quiet_NaN()
3517 : result);
3518 }
3519 }
3520 return new(zone) HPower(left, right);
3521}
3522
3523
// Creates an HMathMinMax, constant-folding when both operands are known
// numbers. The equal-value branch distinguishes +0 and -0 by sign bit
// (min(-0, +0) is -0, max(-0, +0) is +0); if all comparisons fail, at least
// one operand is NaN and the result is NaN.
HInstruction* HMathMinMax::New(Isolate* isolate, Zone* zone, HValue* context,
                               HValue* left, HValue* right, Operation op) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double d_left = c_left->DoubleValue();
      double d_right = c_right->DoubleValue();
      if (op == kMathMin) {
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
                                                                 : d_right);
        }
      } else {
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
                                                                 : d_left);
        }
      }
      // All comparisons failed, must be NaN.
      return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
    }
  }
  return new(zone) HMathMinMax(context, left, right, op);
}
3555
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003556HInstruction* HMod::New(Isolate* isolate, Zone* zone, HValue* context,
Ben Murdoch097c5b22016-05-18 11:27:45 +01003557 HValue* left, HValue* right) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003558 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3559 HConstant* c_left = HConstant::cast(left);
3560 HConstant* c_right = HConstant::cast(right);
3561 if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
3562 int32_t dividend = c_left->Integer32Value();
3563 int32_t divisor = c_right->Integer32Value();
3564 if (dividend == kMinInt && divisor == -1) {
3565 return H_CONSTANT_DOUBLE(-0.0);
3566 }
3567 if (divisor != 0) {
3568 int32_t res = dividend % divisor;
3569 if ((res == 0) && (dividend < 0)) {
3570 return H_CONSTANT_DOUBLE(-0.0);
3571 }
3572 return H_CONSTANT_INT(res);
3573 }
3574 }
3575 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003576 return new (zone) HMod(context, left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003577}
3578
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003579HInstruction* HDiv::New(Isolate* isolate, Zone* zone, HValue* context,
Ben Murdoch097c5b22016-05-18 11:27:45 +01003580 HValue* left, HValue* right) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003581 // If left and right are constant values, try to return a constant value.
3582 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3583 HConstant* c_left = HConstant::cast(left);
3584 HConstant* c_right = HConstant::cast(right);
3585 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3586 if (c_right->DoubleValue() != 0) {
3587 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
3588 if (IsInt32Double(double_res)) {
3589 return H_CONSTANT_INT(double_res);
3590 }
3591 return H_CONSTANT_DOUBLE(double_res);
3592 } else {
3593 int sign = Double(c_left->DoubleValue()).Sign() *
3594 Double(c_right->DoubleValue()).Sign(); // Right could be -0.
3595 return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
3596 }
3597 }
3598 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003599 return new (zone) HDiv(context, left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003600}
3601
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003602HInstruction* HBitwise::New(Isolate* isolate, Zone* zone, HValue* context,
Ben Murdoch097c5b22016-05-18 11:27:45 +01003603 Token::Value op, HValue* left, HValue* right) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003604 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3605 HConstant* c_left = HConstant::cast(left);
3606 HConstant* c_right = HConstant::cast(right);
3607 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3608 int32_t result;
3609 int32_t v_left = c_left->NumberValueAsInteger32();
3610 int32_t v_right = c_right->NumberValueAsInteger32();
3611 switch (op) {
3612 case Token::BIT_XOR:
3613 result = v_left ^ v_right;
3614 break;
3615 case Token::BIT_AND:
3616 result = v_left & v_right;
3617 break;
3618 case Token::BIT_OR:
3619 result = v_left | v_right;
3620 break;
3621 default:
3622 result = 0; // Please the compiler.
3623 UNREACHABLE();
3624 }
3625 return H_CONSTANT_INT(result);
3626 }
3627 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003628 return new (zone) HBitwise(context, op, left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003629}
3630
// Defines HInstr::New for shift instructions whose folded result is always an
// int32. `result` is an expression over c_left/c_right; the shift count is
// masked with 0x1f at the expansion sites, matching JavaScript semantics.
#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
  HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context, \
                            HValue* left, HValue* right) { \
    if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
      HConstant* c_left = HConstant::cast(left); \
      HConstant* c_right = HConstant::cast(right); \
      if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
        return H_CONSTANT_INT(result); \
      } \
    } \
    return new (zone) HInstr(context, left, right); \
  }

DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR
3650
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003651HInstruction* HShr::New(Isolate* isolate, Zone* zone, HValue* context,
Ben Murdoch097c5b22016-05-18 11:27:45 +01003652 HValue* left, HValue* right) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003653 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3654 HConstant* c_left = HConstant::cast(left);
3655 HConstant* c_right = HConstant::cast(right);
3656 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3657 int32_t left_val = c_left->NumberValueAsInteger32();
3658 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
3659 if ((right_val == 0) && (left_val < 0)) {
3660 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
3661 }
3662 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
3663 }
3664 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01003665 return new (zone) HShr(context, left, right);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00003666}
3667
3668
3669HInstruction* HSeqStringGetChar::New(Isolate* isolate, Zone* zone,
3670 HValue* context, String::Encoding encoding,
3671 HValue* string, HValue* index) {
3672 if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
3673 HConstant* c_string = HConstant::cast(string);
3674 HConstant* c_index = HConstant::cast(index);
3675 if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
3676 Handle<String> s = c_string->StringValue();
3677 int32_t i = c_index->Integer32Value();
3678 DCHECK_LE(0, i);
3679 DCHECK_LT(i, s->length());
3680 return H_CONSTANT_INT(s->Get(i));
3681 }
3682 }
3683 return new(zone) HSeqStringGetChar(encoding, string, index);
3684}
3685
3686
3687#undef H_CONSTANT_INT
3688#undef H_CONSTANT_DOUBLE
3689
3690
3691std::ostream& HBitwise::PrintDataTo(std::ostream& os) const { // NOLINT
3692 os << Token::Name(op_) << " ";
3693 return HBitwiseBinaryOperation::PrintDataTo(os);
3694}
3695
3696
void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  // Bail out unless every operand is a constant.
  for (int i = 0; i < OperandCount(); ++i) {
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      // Already an int32 constant; nothing to do.
      continue;
    } else if (operand->HasDoubleValue()) {
      // Replace a double constant with its ToInt32 truncation.
      HConstant* integer_input = HConstant::New(
          graph->isolate(), graph->zone(), graph->GetInvalidContext(),
          DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      // true truncates to 1, false to 0.
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      // Other immortal immovable constants (presumably undefined/null-like
      // values — verify) truncate to 0.
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}
3731
3732
void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
  // Refine the phi's representation from three sources, applied in order:
  // observed uses, operand representations, and hard use requirements.
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUseRequirements();
  UpdateRepresentation(new_rep, h_infer, "use requirements");
}
3742
3743
3744Representation HPhi::RepresentationFromInputs() {
3745 Representation r = representation();
3746 for (int i = 0; i < OperandCount(); ++i) {
3747 // Ignore conservative Tagged assumption of parameters if we have
3748 // reason to believe that it's too conservative.
3749 if (has_type_feedback_from_uses() && OperandAt(i)->IsParameter()) {
3750 continue;
3751 }
3752
3753 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
3754 }
3755 return r;
3756}
3757
3758
3759// Returns a representation if all uses agree on the same representation.
3760// Integer32 is also returned when some uses are Smi but others are Integer32.
3761Representation HValue::RepresentationFromUseRequirements() {
3762 Representation rep = Representation::None();
3763 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3764 // Ignore the use requirement from never run code
3765 if (it.value()->block()->IsUnreachable()) continue;
3766
3767 // We check for observed_input_representation elsewhere.
3768 Representation use_rep =
3769 it.value()->RequiredInputRepresentation(it.index());
3770 if (rep.IsNone()) {
3771 rep = use_rep;
3772 continue;
3773 }
3774 if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
3775 if (rep.generalize(use_rep).IsInteger32()) {
3776 rep = Representation::Integer32();
3777 continue;
3778 }
3779 return Representation::None();
3780 }
3781 return rep;
3782}
3783
3784
3785bool HValue::HasNonSmiUse() {
3786 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3787 // We check for observed_input_representation elsewhere.
3788 Representation use_rep =
3789 it.value()->RequiredInputRepresentation(it.index());
3790 if (!use_rep.IsNone() &&
3791 !use_rep.IsSmi() &&
3792 !use_rep.IsTagged()) {
3793 return true;
3794 }
3795 }
3796 return false;
3797}
3798
3799
3800// Node-specific verification code is only included in debug mode.
3801#ifdef DEBUG
3802
3803void HPhi::Verify() {
3804 DCHECK(OperandCount() == block()->predecessors()->length());
3805 for (int i = 0; i < OperandCount(); ++i) {
3806 HValue* value = OperandAt(i);
3807 HBasicBlock* defining_block = value->block();
3808 HBasicBlock* predecessor_block = block()->predecessors()->at(i);
3809 DCHECK(defining_block == predecessor_block ||
3810 defining_block->Dominates(predecessor_block));
3811 }
3812}
3813
3814
void HSimulate::Verify() {
  HInstruction::Verify();
  // Every simulate must carry an AST id, except when it is immediately
  // followed by an HEnterInlined — presumably that instruction supplies the
  // missing context for the inlined entry; TODO confirm intent.
  DCHECK(HasAstId() || next()->IsEnterInlined());
}
3819
3820
void HCheckHeapObject::Verify() {
  HInstruction::Verify();
  // Check instructions act as guards only; nothing should consume their
  // value directly.
  DCHECK(HasNoUses());
}
3825
3826
void HCheckValue::Verify() {
  HInstruction::Verify();
  // Check instructions act as guards only; nothing should consume their
  // value directly.
  DCHECK(HasNoUses());
}
3831
3832#endif
3833
3834
3835HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
3836 DCHECK(offset >= 0);
3837 DCHECK(offset < FixedArray::kHeaderSize);
3838 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
3839 return HObjectAccess(kInobject, offset);
3840}
3841
3842
3843HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
3844 Representation representation) {
3845 DCHECK(offset >= 0);
3846 Portion portion = kInobject;
3847
3848 if (offset == JSObject::kElementsOffset) {
3849 portion = kElementsPointer;
3850 } else if (offset == JSObject::kMapOffset) {
3851 portion = kMaps;
3852 }
3853 bool existing_inobject_property = true;
3854 if (!map.is_null()) {
3855 existing_inobject_property = (offset <
3856 map->instance_size() - map->unused_property_fields() * kPointerSize);
3857 }
3858 return HObjectAccess(portion, offset, representation, Handle<String>::null(),
3859 false, existing_inobject_property);
3860}
3861
3862
3863HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
3864 switch (offset) {
3865 case AllocationSite::kTransitionInfoOffset:
3866 return HObjectAccess(kInobject, offset, Representation::Tagged());
3867 case AllocationSite::kNestedSiteOffset:
3868 return HObjectAccess(kInobject, offset, Representation::Tagged());
3869 case AllocationSite::kPretenureDataOffset:
3870 return HObjectAccess(kInobject, offset, Representation::Smi());
3871 case AllocationSite::kPretenureCreateCountOffset:
3872 return HObjectAccess(kInobject, offset, Representation::Smi());
3873 case AllocationSite::kDependentCodeOffset:
3874 return HObjectAccess(kInobject, offset, Representation::Tagged());
3875 case AllocationSite::kWeakNextOffset:
3876 return HObjectAccess(kInobject, offset, Representation::Tagged());
3877 default:
3878 UNREACHABLE();
3879 }
3880 return HObjectAccess(kInobject, offset);
3881}
3882
3883
3884HObjectAccess HObjectAccess::ForContextSlot(int index) {
3885 DCHECK(index >= 0);
3886 Portion portion = kInobject;
3887 int offset = Context::kHeaderSize + index * kPointerSize;
3888 DCHECK_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
3889 return HObjectAccess(portion, offset, Representation::Tagged());
3890}
3891
3892
3893HObjectAccess HObjectAccess::ForScriptContext(int index) {
3894 DCHECK(index >= 0);
3895 Portion portion = kInobject;
3896 int offset = ScriptContextTable::GetContextOffset(index);
3897 return HObjectAccess(portion, offset, Representation::Tagged());
3898}
3899
3900
3901HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
3902 DCHECK(offset >= 0);
3903 Portion portion = kInobject;
3904
3905 if (offset == JSObject::kElementsOffset) {
3906 portion = kElementsPointer;
3907 } else if (offset == JSArray::kLengthOffset) {
3908 portion = kArrayLengths;
3909 } else if (offset == JSObject::kMapOffset) {
3910 portion = kMaps;
3911 }
3912 return HObjectAccess(portion, offset);
3913}
3914
3915
// Access into the out-of-object (backing store) property area at a raw
// offset.  Such accesses carry no name and are never in-object properties.
HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
    Representation representation) {
  DCHECK(offset >= 0);
  return HObjectAccess(kBackingStore, offset, representation,
                       Handle<String>::null(), false, false);
}
3922
3923
3924HObjectAccess HObjectAccess::ForField(Handle<Map> map, int index,
3925 Representation representation,
3926 Handle<Name> name) {
3927 if (index < 0) {
3928 // Negative property indices are in-object properties, indexed
3929 // from the end of the fixed part of the object.
3930 int offset = (index * kPointerSize) + map->instance_size();
3931 return HObjectAccess(kInobject, offset, representation, name, false, true);
3932 } else {
3933 // Non-negative property indices are in the properties array.
3934 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
3935 return HObjectAccess(kBackingStore, offset, representation, name,
3936 false, false);
3937 }
3938}
3939
3940
3941void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
3942 // set the appropriate GVN flags for a given load or store instruction
3943 if (access_type == STORE) {
3944 // track dominating allocations in order to eliminate write barriers
3945 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
3946 instr->SetFlag(HValue::kTrackSideEffectDominators);
3947 } else {
3948 // try to GVN loads, but don't hoist above map changes
3949 instr->SetFlag(HValue::kUseGVN);
3950 instr->SetDependsOnFlag(::v8::internal::kMaps);
3951 }
3952
3953 switch (portion()) {
3954 case kArrayLengths:
3955 if (access_type == STORE) {
3956 instr->SetChangesFlag(::v8::internal::kArrayLengths);
3957 } else {
3958 instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
3959 }
3960 break;
3961 case kStringLengths:
3962 if (access_type == STORE) {
3963 instr->SetChangesFlag(::v8::internal::kStringLengths);
3964 } else {
3965 instr->SetDependsOnFlag(::v8::internal::kStringLengths);
3966 }
3967 break;
3968 case kInobject:
3969 if (access_type == STORE) {
3970 instr->SetChangesFlag(::v8::internal::kInobjectFields);
3971 } else {
3972 instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
3973 }
3974 break;
3975 case kDouble:
3976 if (access_type == STORE) {
3977 instr->SetChangesFlag(::v8::internal::kDoubleFields);
3978 } else {
3979 instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
3980 }
3981 break;
3982 case kBackingStore:
3983 if (access_type == STORE) {
3984 instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
3985 } else {
3986 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
3987 }
3988 break;
3989 case kElementsPointer:
3990 if (access_type == STORE) {
3991 instr->SetChangesFlag(::v8::internal::kElementsPointer);
3992 } else {
3993 instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
3994 }
3995 break;
3996 case kMaps:
3997 if (access_type == STORE) {
3998 instr->SetChangesFlag(::v8::internal::kMaps);
3999 } else {
4000 instr->SetDependsOnFlag(::v8::internal::kMaps);
4001 }
4002 break;
4003 case kExternalMemory:
4004 if (access_type == STORE) {
4005 instr->SetChangesFlag(::v8::internal::kExternalMemory);
4006 } else {
4007 instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
4008 }
4009 break;
4010 }
4011}
4012
4013
4014std::ostream& operator<<(std::ostream& os, const HObjectAccess& access) {
4015 os << ".";
4016
4017 switch (access.portion()) {
4018 case HObjectAccess::kArrayLengths:
4019 case HObjectAccess::kStringLengths:
4020 os << "%length";
4021 break;
4022 case HObjectAccess::kElementsPointer:
4023 os << "%elements";
4024 break;
4025 case HObjectAccess::kMaps:
4026 os << "%map";
4027 break;
4028 case HObjectAccess::kDouble: // fall through
4029 case HObjectAccess::kInobject:
4030 if (!access.name().is_null() && access.name()->IsString()) {
4031 os << Handle<String>::cast(access.name())->ToCString().get();
4032 }
4033 os << "[in-object]";
4034 break;
4035 case HObjectAccess::kBackingStore:
4036 if (!access.name().is_null() && access.name()->IsString()) {
4037 os << Handle<String>::cast(access.name())->ToCString().get();
4038 }
4039 os << "[backing-store]";
4040 break;
4041 case HObjectAccess::kExternalMemory:
4042 os << "[external-memory]";
4043 break;
4044 }
4045
4046 return os << "@" << access.offset();
4047}
4048
4049} // namespace internal
4050} // namespace v8