// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5#include "src/crankshaft/hydrogen.h"
6
7#include <sstream>
8
9#include "src/allocation-site-scopes.h"
10#include "src/ast/ast-numbering.h"
11#include "src/ast/scopeinfo.h"
12#include "src/code-factory.h"
13#include "src/crankshaft/hydrogen-bce.h"
14#include "src/crankshaft/hydrogen-bch.h"
15#include "src/crankshaft/hydrogen-canonicalize.h"
16#include "src/crankshaft/hydrogen-check-elimination.h"
17#include "src/crankshaft/hydrogen-dce.h"
18#include "src/crankshaft/hydrogen-dehoist.h"
19#include "src/crankshaft/hydrogen-environment-liveness.h"
20#include "src/crankshaft/hydrogen-escape-analysis.h"
21#include "src/crankshaft/hydrogen-gvn.h"
22#include "src/crankshaft/hydrogen-infer-representation.h"
23#include "src/crankshaft/hydrogen-infer-types.h"
24#include "src/crankshaft/hydrogen-load-elimination.h"
25#include "src/crankshaft/hydrogen-mark-deoptimize.h"
26#include "src/crankshaft/hydrogen-mark-unreachable.h"
27#include "src/crankshaft/hydrogen-osr.h"
28#include "src/crankshaft/hydrogen-range-analysis.h"
29#include "src/crankshaft/hydrogen-redundant-phi.h"
30#include "src/crankshaft/hydrogen-removable-simulates.h"
31#include "src/crankshaft/hydrogen-representation-changes.h"
32#include "src/crankshaft/hydrogen-sce.h"
33#include "src/crankshaft/hydrogen-store-elimination.h"
34#include "src/crankshaft/hydrogen-uint32-analysis.h"
35#include "src/crankshaft/lithium-allocator.h"
36#include "src/crankshaft/typing.h"
37#include "src/full-codegen/full-codegen.h"
38#include "src/ic/call-optimization.h"
39#include "src/ic/ic.h"
40// GetRootConstructor
41#include "src/ic/ic-inl.h"
42#include "src/isolate-inl.h"
43#include "src/parsing/parser.h"
44#include "src/runtime/runtime.h"
45
46#if V8_TARGET_ARCH_IA32
47#include "src/crankshaft/ia32/lithium-codegen-ia32.h" // NOLINT
48#elif V8_TARGET_ARCH_X64
49#include "src/crankshaft/x64/lithium-codegen-x64.h" // NOLINT
50#elif V8_TARGET_ARCH_ARM64
51#include "src/crankshaft/arm64/lithium-codegen-arm64.h" // NOLINT
52#elif V8_TARGET_ARCH_ARM
53#include "src/crankshaft/arm/lithium-codegen-arm.h" // NOLINT
54#elif V8_TARGET_ARCH_PPC
55#include "src/crankshaft/ppc/lithium-codegen-ppc.h" // NOLINT
56#elif V8_TARGET_ARCH_MIPS
57#include "src/crankshaft/mips/lithium-codegen-mips.h" // NOLINT
58#elif V8_TARGET_ARCH_MIPS64
59#include "src/crankshaft/mips64/lithium-codegen-mips64.h" // NOLINT
60#elif V8_TARGET_ARCH_X87
61#include "src/crankshaft/x87/lithium-codegen-x87.h" // NOLINT
62#else
63#error Unsupported target architecture.
64#endif
65
66namespace v8 {
67namespace internal {
68
// Creates an empty basic block owned by |graph|. The phi, predecessor,
// dominated-block and deleted-phi lists are allocated in the graph's zone.
// Instruction/loop/dominator pointers start out NULL and all indices start
// at -1, meaning "not yet assigned".
HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false),
      is_ordered_(false) { }
92
93
// Convenience accessor: the isolate is reached through the owning graph.
Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}


// Flags this block as unreachable; queried via is_reachable().
void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}
102
103
// Turns this block into a loop header by attaching zone-allocated loop
// information. The block must not already be a loop header.
void HBasicBlock::AttachLoopInformation() {
  DCHECK(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}


// Reverts a loop header back to a plain block (used e.g. for degenerate
// single-predecessor loops; see PostProcessLoopHeader).
void HBasicBlock::DetachLoopInformation() {
  DCHECK(IsLoopHeader());
  loop_information_ = NULL;
}
114
115
// Appends |phi| to this block's phi list and links the phi back to this
// block. The start block never holds phis.
void HBasicBlock::AddPhi(HPhi* phi) {
  DCHECK(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}


// Kills |phi| and detaches it from this block.
void HBasicBlock::RemovePhi(HPhi* phi) {
  DCHECK(phi->block() == this);
  DCHECK(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}
130
131
// Appends |instr| to the end of this (unfinished) block. The HBlockEntry
// marker is materialized lazily on the first insertion. A known |position|
// is attached to the instruction (and to the entry marker).
void HBasicBlock::AddInstruction(HInstruction* instr, SourcePosition position) {
  DCHECK(!IsStartBlock() || !IsFinished());
  DCHECK(!instr->IsLinked());
  DCHECK(!IsFinished());

  if (!position.IsUnknown()) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    // First instruction of this block: create the block-entry marker.
    DCHECK(last_environment() != NULL);
    DCHECK(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (!position.IsUnknown()) {
      entry->set_position(position);
    } else {
      DCHECK(!FLAG_hydrogen_track_positions ||
             !graph()->info()->IsOptimizing() || instr->IsAbnormalExit());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}
155
156
157HPhi* HBasicBlock::AddNewPhi(int merged_index) {
158 if (graph()->IsInsideNoSideEffectsScope()) {
159 merged_index = HPhi::kInvalidMergedIndex;
160 }
161 HPhi* phi = new(zone()) HPhi(merged_index, zone());
162 AddPhi(phi);
163 return phi;
164}
165
166
// Builds an HSimulate that snapshots the environment's push/pop/assignment
// history at |ast_id| (for deoptimization), then clears that history.
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  DCHECK(HasEnvironment());
  HEnvironment* environment = last_environment();
  DCHECK(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  // Record the current value of every variable assigned since the last
  // simulate.
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}
199
200
// Terminates the block with the control instruction |end| and registers
// this block as a predecessor of each of |end|'s successors.
void HBasicBlock::Finish(HControlInstruction* end, SourcePosition position) {
  DCHECK(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}
209
210
// Ends this block with an unconditional jump to |block|. When the target is
// an inline return target, first emits HLeaveInlined and discards the
// inlined environment. NOTE(review): |state| may be NULL for plain gotos,
// but must be non-NULL whenever |block| is an inline return target, since
// state->entry() is dereferenced on that path — confirm at call sites.
void HBasicBlock::Goto(HBasicBlock* block, SourcePosition position,
                       FunctionState* state, bool add_simulate) {
  bool drop_extra = state != NULL &&
      state->inlining_kind() == NORMAL_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone())
                   HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}
229
230
// Ends this block by returning |return_value| from an inlined function:
// emits HLeaveInlined, discards the inlined environment, pushes the return
// value for the caller, and jumps to the function-return join block.
void HBasicBlock::AddLeaveInlined(HValue* return_value, FunctionState* state,
                                  SourcePosition position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;

  DCHECK(target->IsInlineReturnTarget());
  DCHECK(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}
248
249
// Installs the very first environment of a block; only legal while the
// block is still empty.
void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  DCHECK(!HasEnvironment());
  DCHECK(first() == NULL);
  UpdateEnvironment(env);
}


// Replaces the block's environment and lets the graph track the maximum
// environment size observed so far.
void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}
261
262
// Stamps |ast_id| onto every incoming edge of this join block: both the
// simulate that precedes each predecessor's goto and the predecessor's
// environment, so all edges agree on the AST id.
void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  DCHECK(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    DCHECK(predecessor->end()->IsGoto());
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    DCHECK(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
                ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}
278
279
280bool HBasicBlock::Dominates(HBasicBlock* other) const {
281 HBasicBlock* current = other->dominator();
282 while (current != NULL) {
283 if (current == this) return true;
284 current = current->dominator();
285 }
286 return false;
287}
288
289
290bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
291 if (this == other) return true;
292 return Dominates(other);
293}
294
295
296int HBasicBlock::LoopNestingDepth() const {
297 const HBasicBlock* current = this;
298 int result = (current->IsLoopHeader()) ? 1 : 0;
299 while (current->parent_loop_header() != NULL) {
300 current = current->parent_loop_header();
301 result++;
302 }
303 return result;
304}
305
306
// Called once all entries into the loop are known: assigns the loop's entry
// AST id to every incoming edge and registers all edges but the first as
// back edges. A single-predecessor "loop" is degenerate and loses its loop
// information again.
void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  DCHECK(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerated loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}
323
324
// Marks the target of the |succ|-th successor edge unreachable. Only valid
// when that target has no other incoming edges.
void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
  DCHECK(IsFinished());
  HBasicBlock* succ_block = end()->SuccessorAt(succ);

  DCHECK(succ_block->predecessors()->length() == 1);
  succ_block->MarkUnreachable();
}
332
333
// Registers |pred| as an incoming edge. Loop headers feed the edge's
// environment values into their phis; ordinary join blocks merge the
// incoming environment into their own; the first edge into an empty,
// environment-less block donates a copy of its environment.
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    DCHECK(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      // One phi per environment slot; each new edge appends one input.
      DCHECK_EQ(phis()->length(), incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    DCHECK(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}
356
357
358void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
359 DCHECK(!dominated_blocks_.Contains(block));
360 // Keep the list of dominated blocks sorted such that if there is two
361 // succeeding block in this list, the predecessor is before the successor.
362 int index = 0;
363 while (index < dominated_blocks_.length() &&
364 dominated_blocks_[index]->block_id() < block->block_id()) {
365 ++index;
366 }
367 dominated_blocks_.InsertAt(index, block, zone());
368}
369
370
// Updates this block's dominator to the common dominator of its current
// dominator and |other|, keeping the dominator's dominated_blocks_ list in
// sync. The upward walk relies on a dominator having a smaller block id
// than the blocks it dominates.
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    // First candidate seen: it trivially dominates this block so far.
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    // Classic two-finger intersection on the dominator tree.
    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      DCHECK(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      // Re-home this block under the newly computed dominator.
      DCHECK(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}
396
397
// For a loop header: flag every block in the loop that dominates all later
// blocks of the same loop (see the detailed edge-counting argument below).
void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through loop that don't go through the current block
    // contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    DCHECK(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        DCHECK(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}
452
453
454int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
455 for (int i = 0; i < predecessors_.length(); ++i) {
456 if (predecessors_[i] == predecessor) return i;
457 }
458 UNREACHABLE();
459 return -1;
460}
461
462
463#ifdef DEBUG
// Debug-only structural checks for a single block: it must be finished,
// have a valid id, and all incoming edges must be split (no predecessor of
// a multi-predecessor block may itself have two successors).
void HBasicBlock::Verify() {
  // Check that every block is finished.
  DCHECK(IsFinished());
  DCHECK(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
476#endif
477
478
// Records |block| as a back edge of this loop and pulls it (and everything
// between it and the header) into the loop's block set via AddBlock.
void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}
483
484
485HBasicBlock* HLoopInformation::GetLastBackEdge() const {
486 int max_id = -1;
487 HBasicBlock* result = NULL;
488 for (int i = 0; i < back_edges_.length(); ++i) {
489 HBasicBlock* cur = back_edges_[i];
490 if (cur->block_id() > max_id) {
491 max_id = cur->block_id();
492 result = cur;
493 }
494 }
495 return result;
496}
497
498
// Recursively adds |block| to this loop. The header itself and blocks
// already in this loop are ignored; a block belonging to a nested loop is
// represented by that loop's header. Newly added members pull in their
// predecessors, so the whole body between a back edge and the header is
// collected.
void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;
  if (block->parent_loop_header() != NULL) {
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}
512
513
514#ifdef DEBUG
515
516// Checks reachability of the blocks in this graph and stores a bit in
517// the BitVector "reachable()" for every block that can be reached
518// from the start block of the graph. If "dont_visit" is non-null, the given
519// block is treated as if it would not be part of the graph. "visited_count()"
520// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  // The analysis runs eagerly: after construction, reachable() and
  // visited_count() already hold the final result.
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  int visited_count() const { return visited_count_; }
  const BitVector* reachable() const { return &reachable_; }

 private:
  // Marks |block| reachable and schedules it for expansion, unless it is
  // NULL, the excluded block, or already visited.
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  // Worklist traversal over successor edges.
  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;             // number of blocks marked reachable
  ZoneList<HBasicBlock*> stack_;  // worklist of blocks still to expand
  BitVector reachable_;           // one bit per block id
  HBasicBlock* dont_visit_;       // block treated as absent (may be NULL)
};
561
562
// Debug-only whole-graph consistency check: per-block invariants, mutual
// predecessor/successor agreement, phi arguments, and join-block AST id
// agreement. With |do_full_verify|, additionally checks that every block is
// reachable from the entry and that each block becomes unreachable when its
// dominator is removed (i.e. the dominator tree is valid).
void HGraph::Verify(bool do_full_verify) const {
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the last
    // node is a control instruction.
    HInstruction* current = block->first();
    DCHECK(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      DCHECK((current->next() == NULL) == current->IsControlInstruction());
      DCHECK(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    DCHECK(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      DCHECK(first->predecessors()->Contains(block));
      if (second != NULL) {
        DCHECK(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        DCHECK(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        DCHECK(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check special property of first block to have no predecessors.
  DCHECK(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    DCHECK(analyzer.visited_count() == blocks_.length());

    // Check that entry block dominator is NULL.
    DCHECK(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only start block may have no dominator assigned to.
        DCHECK(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}
643
644#endif
645
646
// Returns the cached constant for |value|, creating it on first use. A new
// constant is inserted right after the graph entry so it dominates all
// possible uses.
HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that will
    // recursively call GetConstant
    HConstant* constant = HConstant::New(isolate(), zone(), NULL, value);
    constant->InsertAfter(entry_block()->first());
    pointer->set(constant);
    return constant;
  }
  return ReinsertConstantIfNecessary(pointer->get());
}
659
660
// A cached constant may have been removed from the graph (e.g. as dead
// code) since it was created; if so, revive it and re-insert it after the
// graph entry before handing it out again.
HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
  if (!constant->IsLinked()) {
    // The constant was removed from the graph. Reinsert.
    constant->ClearFlag(HValue::kIsDead);
    constant->InsertAfter(entry_block()->first());
  }
  return constant;
}
669
670
// Cached small-integer constants, created on demand via GetConstant().
HConstant* HGraph::GetConstant0() {
  return GetConstant(&constant_0_, 0);
}


HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, 1);
}


HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, -1);
}


// Maps a C++ bool onto the cached true/false oddball constants.
HConstant* HGraph::GetConstantBool(bool value) {
  return value ? GetConstantTrue() : GetConstantFalse();
}
689
690
// Defines HGraph::GetConstant<Name>() for a cached oddball constant: the
// HConstant wraps the immovable factory value <name>_value with map
// <type>_map, tagged representation, HType |htype| and the given ToBoolean
// value. As in GetConstant(), the result is (re)inserted right after the
// graph entry block.
#define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value) \
HConstant* HGraph::GetConstant##Name() { \
  if (!constant_##name##_.is_set()) { \
    HConstant* constant = new(zone()) HConstant( \
        Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
        Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()), \
        false, \
        Representation::Tagged(), \
        htype, \
        true, \
        boolean_value, \
        false, \
        ODDBALL_TYPE); \
    constant->InsertAfter(entry_block()->first()); \
    constant_##name##_.set(constant); \
  } \
  return ReinsertConstantIfNecessary(constant_##name##_.get()); \
}
709
710
DEFINE_GET_CONSTANT(Undefined, undefined, undefined, HType::Undefined(), false)
DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::None(), false)
DEFINE_GET_CONSTANT(Null, null, null, HType::Null(), false)


#undef DEFINE_GET_CONSTANT

// Defines HGraph::IsConstant<Name>(): identity comparison against the
// cached constant (false when that cache entry was never created).
#define DEFINE_IS_CONSTANT(Name, name) \
bool HGraph::IsConstant##Name(HConstant* constant) { \
  return constant_##name##_.is_set() && constant == constant_##name##_.get(); \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)

#undef DEFINE_IS_CONSTANT
734
735
// Sentinel used where no real context is available. The magic number is an
// arbitrary marker value; note GetConstant() must never be handed this
// context (see the recursion comment there).
HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}
739
740
741bool HGraph::IsStandardConstant(HConstant* constant) {
742 if (IsConstantUndefined(constant)) return true;
743 if (IsConstant0(constant)) return true;
744 if (IsConstant1(constant)) return true;
745 if (IsConstantMinus1(constant)) return true;
746 if (IsConstantTrue(constant)) return true;
747 if (IsConstantFalse(constant)) return true;
748 if (IsConstantHole(constant)) return true;
749 if (IsConstantNull(constant)) return true;
750 return false;
751}
752
753
// Default-constructed IfBuilder: inert until Initialize() is called.
HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}


// Fresh if/else: creates new true/false target blocks and expects a
// compare to be supplied via AddCompare()/a condition helper.
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : needs_compare_(true) {
  Initialize(builder);
}


// Resumes a previously captured continuation: reuses its true/false
// blocks, so no compare is needed.
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
                                    HIfContinuation* continuation)
    : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) {
  InitializeDontCreateBlocks(builder);
  continuation->Continue(&first_true_block_, &first_false_block_);
}
769
770
// Resets all per-if state flags, merge lists and counters. The true/false
// target blocks are deliberately left untouched: callers either create them
// (Initialize) or adopt them from a continuation.
void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
    HGraphBuilder* builder) {
  builder_ = builder;
  finished_ = false;
  did_then_ = false;
  did_else_ = false;
  did_else_if_ = false;
  did_and_ = false;
  did_or_ = false;
  captured_ = false;
  pending_merge_block_ = false;
  split_edge_merge_block_ = NULL;
  merge_at_join_blocks_ = NULL;
  normal_merge_at_join_block_count_ = 0;
  deopt_merge_at_join_block_count_ = 0;
}
787
788
// Full initialization: resets all state and creates the initial true/false
// target blocks, each starting from a copy of the current environment.
void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
  InitializeDontCreateBlocks(builder);
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}
795
796
// Installs |compare| as the control instruction deciding the current
// condition. Three shapes are handled: a new else-if arm (fresh target
// blocks), an Or()/And() chain (one outcome routed through the shared
// split-edge block), and the plain case (both outcomes go straight to the
// arm blocks).
HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  DCHECK(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder()->environment();
    first_true_block_ = builder()->CreateBasicBlock(env->Copy());
    first_false_block_ = builder()->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
    if (did_or_) {
      // In an Or() chain the split edge carries the true outcome.
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      // In an And() chain the split edge carries the false outcome.
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder()->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}
832
833
// Chains another condition with OR semantics: true outcomes of all chained
// compares funnel into a shared split-edge block (which becomes the overall
// true target), while evaluation of the next condition continues in a fresh
// false block. Cannot be mixed with And() in the same chain.
void HGraphBuilder::IfBuilder::Or() {
  DCHECK(!needs_compare_);
  DCHECK(!did_and_);
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_false_block_);
  first_false_block_ = builder()->CreateBasicBlock(env->Copy());
}
847
848
// Chains another condition with AND semantics: false outcomes of all
// chained compares funnel into a shared split-edge block (which becomes the
// overall false target), while evaluation continues in a fresh true block.
// Cannot be mixed with Or() in the same chain.
void HGraphBuilder::IfBuilder::And() {
  DCHECK(!needs_compare_);
  DCHECK(!did_or_);
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
    builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder()->set_current_block(first_true_block_);
  first_true_block_ = builder()->CreateBasicBlock(env->Copy());
}
862
863
// Finishes the if/else and stores the resulting then/else blocks in
// |continuation| instead of merging them; the builder is left without a
// current block.
void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  DCHECK(true_block != NULL);
  DCHECK(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder()->set_current_block(NULL);
  End();
}
880
881
// Finishes the if/else and wires any still-open then/else blocks into the
// corresponding branches of the existing |continuation|.
void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  DCHECK(!did_else_if_);
  DCHECK(!finished_);
  DCHECK(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;  // arms are joined here, not merged in End()
  if (true_block != NULL && !true_block->IsFinished()) {
    DCHECK(continuation->IsTrueReachable());
    builder()->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    DCHECK(continuation->IsFalseReachable());
    builder()->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}
901
902
// Starts the "then" arm: if no compare was supplied, emits a never-taken
// branch (see comment below), then makes first_true_block_ the current
// block.
void HGraphBuilder::IfBuilder::Then() {
  DCHECK(!captured_);
  DCHECK(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder()->graph()->GetConstantFalse();
    ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    boolean_type.Add(ToBooleanStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder()->FinishCurrentBlock(branch);
  }
  builder()->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}
922
923
// Switches to the "else" arm: queues the then-arm's exit for merging at the
// join point and makes first_false_block_ the current block.
void HGraphBuilder::IfBuilder::Else() {
  DCHECK(did_then_);
  DCHECK(!captured_);
  DCHECK(!finished_);
  AddMergeAtJoinBlock(false);
  builder()->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}
933
934
// Terminates the current arm with an eager deoptimization; the arm is
// recorded as a deopt merge so End() can pad its environment.
void HGraphBuilder::IfBuilder::Deopt(Deoptimizer::DeoptReason reason) {
  DCHECK(did_then_);
  builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}


// Terminates the current arm by returning |value| from the function being
// built; the arm therefore does not flow into the join block.
void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HValue* parameter_count = builder()->graph()->GetConstantMinus1();
  builder()->FinishExitCurrentBlock(
      builder()->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}
948
949
950void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
951 if (!pending_merge_block_) return;
952 HBasicBlock* block = builder()->current_block();
953 DCHECK(block == NULL || !block->IsFinished());
954 MergeAtJoinBlock* record = new (builder()->zone())
955 MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
956 merge_at_join_blocks_ = record;
957 if (block != NULL) {
958 DCHECK(block->end() == NULL);
959 if (deopt) {
960 normal_merge_at_join_block_count_++;
961 } else {
962 deopt_merge_at_join_block_count_++;
963 }
964 }
965 builder()->set_current_block(NULL);
966 pending_merge_block_ = false;
967}
968
969
// Completes the if/else structure: synthesizes missing Then()/Else() arms
// and records each arm's exit block for merging. May only run once
// (guarded by finished_).
void HGraphBuilder::IfBuilder::Finish() {
  DCHECK(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}
982
983
984void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
985 HBasicBlock** else_continuation) {
986 Finish();
987
988 MergeAtJoinBlock* else_record = merge_at_join_blocks_;
989 if (else_continuation != NULL) {
990 *else_continuation = else_record->block_;
991 }
992 MergeAtJoinBlock* then_record = else_record->next_;
993 if (then_continuation != NULL) {
994 *then_continuation = then_record->block_;
995 }
996 DCHECK(then_record->next_ == NULL);
997}
998
999
1000void HGraphBuilder::IfBuilder::EndUnreachable() {
1001 if (captured_) return;
1002 Finish();
1003 builder()->set_current_block(nullptr);
1004}
1005
1006
// Closes the if-construct and joins all recorded branch continuations into a
// single merge block, which becomes the current block. Branches that ended in
// a deopt are finished with an abnormal exit rather than merged.
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
                            deopt_merge_at_join_block_count_;
  DCHECK(total_merged_blocks >= 1);
  // With a single surviving block no merge block is needed at all.
  HBasicBlock* merge_block =
      total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather than
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder()->set_current_block(current->block_);
        return;
      }
      builder()->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      // Deopt branches never reach the merge; terminate them explicitly.
      current->block_->FinishExit(
          HAbnormalExit::New(builder()->isolate(), builder()->zone(), NULL),
          SourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder()->set_current_block(merge_block);
}
1046
1047
1048HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
1049 Initialize(builder, NULL, kWhileTrue, NULL);
1050}
1051
1052
1053HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1054 LoopBuilder::Direction direction) {
1055 Initialize(builder, context, direction, builder->graph()->GetConstant1());
1056}
1057
1058
1059HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1060 LoopBuilder::Direction direction,
1061 HValue* increment_amount) {
1062 Initialize(builder, context, direction, increment_amount);
1063 increment_amount_ = increment_amount;
1064}
1065
1066
// Shared constructor body for all LoopBuilder variants: remembers the loop
// parameters and creates the loop header block up front. Body and exit
// blocks are created later, in BeginBody() / Break().
void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
                                            HValue* context,
                                            Direction direction,
                                            HValue* increment_amount) {
  builder_ = builder;
  context_ = context;
  direction_ = direction;
  increment_amount_ = increment_amount;

  finished_ = false;
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
  exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
}
1082
1083
// Starts the body of a counting loop: sets up the induction-variable phi in
// the header, emits the (phi <token> terminating) branch between body and
// exit, and returns the value to use as the loop index inside the body (the
// already-incremented value for pre-increment/-decrement loops, otherwise
// the phi itself).
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  DCHECK(direction_ != kWhileTrue);
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  // The induction value is kept on the expression stack so the header's
  // environment merges it into the phi.
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  // Loop condition: stay in the body while (phi token terminating) holds.
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
      phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    Isolate* isolate = builder_->isolate();
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(isolate, zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(isolate, zone(), context_, phi_, one);
    }
    // The bounds-checked phi cannot overflow when stepped by one.
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}
1124
1125
// Starts the body of a while-true loop. |drop_count| values are dropped from
// the environment captured before entering the header; callers push them
// solely to keep the values alive across the back edge without simulates.
void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
  DCHECK(direction_ == kWhileTrue);
  HEnvironment* env = builder_->environment();
  builder_->GotoNoSimulate(header_block_);
  builder_->set_current_block(header_block_);
  env->Drop(drop_count);
}
1133
1134
// Exits the loop from inside the body. The first break lazily creates a
// trampoline block that all break paths (and, for counting loops, the normal
// loop exit) funnel through; EndBody() then makes it the current block.
void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we saw a break.
    if (direction_ == kWhileTrue) {
      HEnvironment* env = builder_->environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    } else {
      // Counting loops already have an exit block; chain it into the
      // trampoline so both exit paths merge there.
      HEnvironment* env = exit_block_->last_environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
      builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
    }
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}
1151
1152
// Closes the loop body: emits the post-increment/-decrement step if
// requested, wires the back edge to the header, and makes the appropriate
// exit block the current block.
void HGraphBuilder::LoopBuilder::EndBody() {
  DCHECK(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    Isolate* isolate = builder_->isolate();
    if (direction_ == kPostIncrement) {
      increment_ =
          HAdd::New(isolate, zone(), context_, phi_, increment_amount_);
    } else {
      increment_ =
          HSub::New(isolate, zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  if (direction_ != kWhileTrue) {
    // Push the new increment value on the expression stack to merge into
    // the phi.
    builder_->environment()->Push(increment_);
  }
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  // If any Break() occurred, execution continues at the trampoline;
  // otherwise at the normal loop exit.
  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}
1185
1186
// Top-level driver: allocates the HGraph, runs graph building starting at
// the entry block, finalizes uniqueness information, and returns the graph.
// Returns NULL when BuildGraph() bails out.
HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}
1196
1197
// Appends |instr| to the current block at the current source position and
// returns it. Inside a NoObservableSideEffectsScope the instruction is
// flagged as having no observable side effects.
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  DCHECK(current_block() != NULL);
  // When tracking positions in an optimizing compile, a position must be set.
  DCHECK(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}
1209
1210
// Terminates the current block with control instruction |last|. If the
// instruction ends the function (return or abnormal exit) the current block
// is cleared so nothing further can be appended.
void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  DCHECK(!FLAG_hydrogen_track_positions ||
         !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}
1220
1221
1222void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
1223 DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
1224 !position_.IsUnknown());
1225 current_block()->FinishExit(instruction, source_position());
1226 if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
1227 set_current_block(NULL);
1228 }
1229}
1230
1231
// Emits a load/add/store sequence that bumps the given native code counter,
// guarded by the --native-code-counters flag and the counter being enabled.
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value =
        Add<HLoadNamedField>(reference, nullptr, HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}
1243
1244
// Emits a simulate for bailout point |id| into the current block. Not
// allowed inside a NoObservableSideEffectsScope.
void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  DCHECK(current_block() != NULL);
  DCHECK(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}
1251
1252
1253HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1254 HBasicBlock* b = graph()->CreateBasicBlock();
1255 b->SetInitialEnvironment(env);
1256 return b;
1257}
1258
1259
1260HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
1261 HBasicBlock* header = graph()->CreateBasicBlock();
1262 HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
1263 header->SetInitialEnvironment(entry_env);
1264 header->AttachLoopInformation();
1265 return header;
1266}
1267
1268
1269HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
1270 HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());
1271
1272 HValue* bit_field2 =
1273 Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
1274 return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
1275}
1276
1277
1278HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1279 if (obj->type().IsHeapObject()) return obj;
1280 return Add<HCheckHeapObject>(obj);
1281}
1282
1283
// Unconditionally deoptimizes (eagerly) with |reason| and terminates the
// current block with an abnormal exit.
void HGraphBuilder::FinishExitWithHardDeoptimization(
    Deoptimizer::DeoptReason reason) {
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}
1289
1290
1291HValue* HGraphBuilder::BuildCheckString(HValue* string) {
1292 if (!string->type().IsString()) {
1293 DCHECK(!string->IsConstant() ||
1294 !HConstant::cast(string)->HasStringValue());
1295 BuildCheckHeapObject(string);
1296 return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
1297 }
1298 return string;
1299}
1300
1301
1302HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
1303 if (object->type().IsJSObject()) return object;
1304 if (function->IsConstant() &&
1305 HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
1306 Handle<JSFunction> f = Handle<JSFunction>::cast(
1307 HConstant::cast(function)->handle(isolate()));
1308 SharedFunctionInfo* shared = f->shared();
1309 if (is_strict(shared->language_mode()) || shared->native()) return object;
1310 }
1311 return Add<HWrapReceiver>(object, function);
1312}
1313
1314
1315HValue* HGraphBuilder::BuildCheckAndGrowElementsCapacity(
1316 HValue* object, HValue* elements, ElementsKind kind, HValue* length,
1317 HValue* capacity, HValue* key) {
1318 HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
1319 HValue* max_capacity = AddUncasted<HAdd>(capacity, max_gap);
1320 Add<HBoundsCheck>(key, max_capacity);
1321
1322 HValue* new_capacity = BuildNewElementsCapacity(key);
1323 HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind, kind,
1324 length, new_capacity);
1325 return new_elements;
1326}
1327
1328
// Handles out-of-bounds stores/loads that may grow the backing store: when
// |key| is at or beyond |length|, grows the elements array (inline in stubs,
// via HMaybeGrowElements otherwise), updates a JSArray's length, and for
// fast-smi stores pre-initializes the new slot with zero. Returns the
// elements array to use.
HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  // Holey kinds may grow past the length; packed kinds only append exactly
  // at the current length.
  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  if (top_info()->IsStub()) {
    IfBuilder capacity_checker(this);
    capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                  Token::GTE);
    capacity_checker.Then();
    HValue* new_elements = BuildCheckAndGrowElementsCapacity(
        object, elements, kind, length, current_capacity, key);
    environment()->Push(new_elements);
    capacity_checker.Else();
    environment()->Push(elements);
    capacity_checker.End();
  } else {
    HValue* result = Add<HMaybeGrowElements>(
        object, elements, key, current_capacity, is_js_array, kind);
    environment()->Push(result);
  }

  if (is_js_array) {
    // The store extends the array; bump its length to key + 1.
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), nullptr,
                     kind);
  }

  length_checker.Else();
  // In-bounds path: only a bounds check against the length is required.
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}
1387
1388
// Implements copy-on-write for element stores: if |elements| has the COW
// fixed-array map, copies it into a fresh writable array of the same
// capacity; otherwise returns it unchanged.
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  // Growing to the same capacity performs the copy to a writable array.
  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}
1415
1416
// Transitions |object| from |from_kind| to |to_kind| by installing |map|.
// Non-simple transitions (e.g. smi-to-double) additionally copy the backing
// store, unless it is the canonical empty fixed array.
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  // A holey source kind may only transition to another holey kind.
  DCHECK(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    // The empty fixed array needs no conversion — only copy real elements.
    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    HInstruction* array_length =
        is_jsarray
            ? Add<HLoadNamedField>(object, nullptr,
                                   HObjectAccess::ForArrayLength(from_kind))
            : elements_length;

    // Same-capacity "grow" converts the store to the target representation.
    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}
1457
1458
// Deopt-guards that |receiver| is a plain JSObject (within the
// JS_OBJECT_TYPE..LAST_JS_OBJECT_TYPE instance-type range) and that none of
// the bits in |bit_field_mask| are set on its map's bit field.
void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
                                       int bit_field_mask) {
  // Check that the object isn't a smi.
  Add<HCheckHeapObject>(receiver);

  // Get the map of the receiver.
  HValue* map =
      Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());

  // Check the instance type and if an access check is needed, this can be
  // done with a single load, since both bytes are adjacent in the map.
  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
  HValue* instance_type_and_bit_field =
      Add<HLoadNamedField>(map, nullptr, access);

  // Low byte: instance type; high byte: the requested bit-field bits.
  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
                                             instance_type_and_bit_field,
                                             mask);
  // After masking, a valid receiver leaves exactly the instance type, which
  // the bounds check constrains to the JSObject range (any set bit-field
  // bits push the value out of range and trigger the deopt).
  HValue* sub_result = AddUncasted<HSub>(and_result,
                                         Add<HConstant>(JS_OBJECT_TYPE));
  Add<HBoundsCheck>(sub_result,
                    Add<HConstant>(LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE));
}
1483
1484
// Normalizes a keyed-access key: pushes a smi element index on the if-true
// path of |join_continuation| and a unique name (symbol or internalized
// string) on the if-false path, internalizing strings on the fly.
void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
                                         HIfContinuation* join_continuation) {
  // The sometimes unintuitively backward ordering of the ifs below is
  // convoluted, but necessary. All of the paths must guarantee that the
  // if-true of the continuation returns a smi element index and the if-false of
  // the continuation returns either a symbol or a unique string key. All other
  // object types cause a deopt to fall back to the runtime.

  IfBuilder key_smi_if(this);
  key_smi_if.If<HIsSmiAndBranch>(key);
  key_smi_if.Then();
  {
    Push(key);  // Nothing to do, just continue to true of continuation.
  }
  key_smi_if.Else();
  {
    HValue* map = Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForMap());
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());

    // Non-unique string, check for a string with a hash code that is actually
    // an index.
    STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
    IfBuilder not_string_or_name_if(this);
    not_string_or_name_if.If<HCompareNumericAndBranch>(
        instance_type,
        Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
        Token::GT);

    not_string_or_name_if.Then();
    {
      // Non-smi, non-Name, non-String: Try to convert to smi in case of
      // HeapNumber.
      // TODO(danno): This could call some variant of ToString
      Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
    }
    not_string_or_name_if.Else();
    {
      // String or Name: check explicitly for Name, they can short-circuit
      // directly to unique non-index key path.
      IfBuilder not_symbol_if(this);
      not_symbol_if.If<HCompareNumericAndBranch>(
          instance_type,
          Add<HConstant>(SYMBOL_TYPE),
          Token::NE);

      not_symbol_if.Then();
      {
        // String: check whether the String is a String of an index. If it is,
        // extract the index value from the hash.
        HValue* hash = Add<HLoadNamedField>(key, nullptr,
                                            HObjectAccess::ForNameHashField());
        HValue* not_index_mask = Add<HConstant>(static_cast<int>(
            String::kContainsCachedArrayIndexMask));

        HValue* not_index_test = AddUncasted<HBitwise>(
            Token::BIT_AND, hash, not_index_mask);

        IfBuilder string_index_if(this);
        string_index_if.If<HCompareNumericAndBranch>(not_index_test,
                                                     graph()->GetConstant0(),
                                                     Token::EQ);
        string_index_if.Then();
        {
          // String with index in hash: extract string and merge to index path.
          Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
        }
        string_index_if.Else();
        {
          // Key is a non-index String, check for uniqueness/internalization.
          // If it's not internalized yet, internalize it now.
          HValue* not_internalized_bit = AddUncasted<HBitwise>(
              Token::BIT_AND,
              instance_type,
              Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));

          IfBuilder internalized(this);
          internalized.If<HCompareNumericAndBranch>(not_internalized_bit,
                                                    graph()->GetConstant0(),
                                                    Token::EQ);
          internalized.Then();
          Push(key);

          internalized.Else();
          // Call the runtime to obtain the internalized version of the key.
          Add<HPushArguments>(key);
          HValue* intern_key = Add<HCallRuntime>(
              Runtime::FunctionForId(Runtime::kInternalizeString), 1);
          Push(intern_key);

          internalized.End();
          // Key guaranteed to be a unique string
        }
        string_index_if.JoinContinuation(join_continuation);
      }
      not_symbol_if.Else();
      {
        Push(key);  // Key is symbol
      }
      not_symbol_if.JoinContinuation(join_continuation);
    }
    not_string_or_name_if.JoinContinuation(join_continuation);
  }
  key_smi_if.JoinContinuation(join_continuation);
}
1589
1590
// Deopt-guards that |receiver| is not the global object.
void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
  // Get the instance type of the receiver, and make sure that it is
  // not one of the global object types.
  HValue* map =
      Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
  HValue* global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);

  IfBuilder if_global_object(this);
  if_global_object.If<HCompareNumericAndBranch>(instance_type, global_type,
                                                Token::EQ);
  if_global_object.ThenDeopt(Deoptimizer::kReceiverWasAGlobalObject);
  if_global_object.End();
}
1606
1607
1608void HGraphBuilder::BuildTestForDictionaryProperties(
1609 HValue* object,
1610 HIfContinuation* continuation) {
1611 HValue* properties = Add<HLoadNamedField>(
1612 object, nullptr, HObjectAccess::ForPropertiesPointer());
1613 HValue* properties_map =
1614 Add<HLoadNamedField>(properties, nullptr, HObjectAccess::ForMap());
1615 HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
1616 IfBuilder builder(this);
1617 builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
1618 builder.CaptureContinuation(continuation);
1619}
1620
1621
// Computes the keyed-lookup-cache slot for (map, string key): XORs the
// shifted map word with the shifted string hash and masks the result to the
// cache's capacity.
HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
                                                 HValue* key) {
  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  HValue* object_map =
      Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMapAsInteger32());
  HValue* shifted_map = AddUncasted<HShr>(
      object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
  HValue* string_hash =
      Add<HLoadNamedField>(key, nullptr, HObjectAccess::ForStringHashField());
  HValue* shifted_hash = AddUncasted<HShr>(
      string_hash, Add<HConstant>(String::kHashShift));
  HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
                                             shifted_hash);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
                               Add<HConstant>(mask));
}
1640
1641
// Emits the integer hash used for element lookups in seeded number
// dictionaries: the index is XORed with the heap's hash seed and then mixed
// by the shift/add/xor/multiply sequence spelled out in the comments below.
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  // NOTE(review): the uint32_t seed is implicitly narrowed to int32_t here;
  // the bit pattern is what feeds the XOR below.
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}
1673
1674
// Loads an element from a dictionary-mode backing store by probing the
// seeded number dictionary directly. The probe sequence advances by an
// increasing step (entry += count; count += 1), i.e. quadratic probing over
// the capacity mask. Missing keys and entries with non-zero property-details
// type bits fall back to the %KeyedGetProperty runtime function.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(
    HValue* receiver, HValue* elements, HValue* key, HValue* hash,
    LanguageMode language_mode) {
  HValue* capacity =
      Add<HLoadKeyed>(elements, Add<HConstant>(NameDictionary::kCapacityIndex),
                      nullptr, nullptr, FAST_ELEMENTS);

  // Capacity is a power of two, so capacity - 1 masks probes into range.
  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  HValue* entry = hash;
  HValue* count = graph()->GetConstant1();
  Push(entry);
  Push(count);

  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
                                              graph()->CreateBasicBlock());
  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
                                               graph()->CreateBasicBlock());
  LoopBuilder probe_loop(this);
  probe_loop.BeginBody(2);  // Drop entry, count from last environment to
                            // appease live range building without simulates.

  count = Pop();
  entry = Pop();
  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
  int entry_size = SeededNumberDictionary::kEntrySize;
  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
  base_index->ClearFlag(HValue::kCanOverflow);
  int start_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index =
      AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key =
      Add<HLoadKeyed>(elements, key_index, nullptr, nullptr, FAST_ELEMENTS);
  IfBuilder if_undefined(this);
  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
                                             graph()->GetConstantUndefined());
  if_undefined.Then();
  {
    // element == undefined means "not found". Call the runtime.
    // TODO(jkummerow): walk the prototype chain instead.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(
        Runtime::FunctionForId(is_strong(language_mode)
                                   ? Runtime::kKeyedGetPropertyStrong
                                   : Runtime::kKeyedGetProperty),
        2));
  }
  if_undefined.Else();
  {
    IfBuilder if_match(this);
    if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
    if_match.Then();
    if_match.Else();

    // Update non-internalized string in the dictionary with internalized key?
    IfBuilder if_update_with_internalized(this);
    HValue* smi_check =
        if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
    if_update_with_internalized.And();
    HValue* map = AddLoadMap(candidate_key, smi_check);
    HValue* instance_type =
        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
    HValue* not_internalized_bit = AddUncasted<HBitwise>(
        Token::BIT_AND, instance_type,
        Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
    if_update_with_internalized.If<HCompareNumericAndBranch>(
        not_internalized_bit, graph()->GetConstant0(), Token::NE);
    if_update_with_internalized.And();
    if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
        candidate_key, graph()->GetConstantHole());
    if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
                                                               key, Token::EQ);
    if_update_with_internalized.Then();
    // Replace a key that is a non-internalized string by the equivalent
    // internalized string for faster further lookups.
    Add<HStoreKeyed>(elements, key_index, key, nullptr, FAST_ELEMENTS);
    if_update_with_internalized.Else();

    if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
    if_match.JoinContinuation(&found_key_match_continuation);

    IfBuilder found_key_match(this, &found_key_match_continuation);
    found_key_match.Then();
    // Key at current probe matches. Relevant bits in the |details| field must
    // be zero, otherwise the dictionary element requires special handling.
    HValue* details_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);
    HValue* details = Add<HLoadKeyed>(elements, details_index, nullptr, nullptr,
                                      FAST_ELEMENTS);
    int details_mask = PropertyDetails::TypeField::kMask;
    details = AddUncasted<HBitwise>(Token::BIT_AND, details,
                                    Add<HConstant>(details_mask));
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(
        details, graph()->GetConstant0(), Token::EQ);
    details_compare.Then();
    // Plain data entry: the value sits right after the key in the entry.
    HValue* result_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
    result_index->ClearFlag(HValue::kCanOverflow);
    Push(Add<HLoadKeyed>(elements, result_index, nullptr, nullptr,
                         FAST_ELEMENTS));
    details_compare.Else();
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(
        Runtime::FunctionForId(is_strong(language_mode)
                                   ? Runtime::kKeyedGetPropertyStrong
                                   : Runtime::kKeyedGetProperty),
        2));
    details_compare.End();

    found_key_match.Else();
    found_key_match.JoinContinuation(&return_or_loop_continuation);
  }
  if_undefined.JoinContinuation(&return_or_loop_continuation);

  IfBuilder return_or_loop(this, &return_or_loop_continuation);
  return_or_loop.Then();
  probe_loop.Break();

  return_or_loop.Else();
  // Not found at this probe: advance with an increasing step.
  entry = AddUncasted<HAdd>(entry, count);
  entry->ClearFlag(HValue::kCanOverflow);
  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
  count->ClearFlag(HValue::kCanOverflow);
  Push(entry);
  Push(count);

  probe_loop.EndBody();

  return_or_loop.End();

  return Pop();
}
1813
1814
// Allocates and fully initializes a JSIteratorResult ({value, done} object)
// using the iterator-result map from the native context.
HValue* HGraphBuilder::BuildCreateIterResultObject(HValue* value,
                                                   HValue* done) {
  NoObservableSideEffectsScope scope(this);

  // Allocate the JSIteratorResult object.
  HValue* result =
      Add<HAllocate>(Add<HConstant>(JSIteratorResult::kSize), HType::JSObject(),
                     NOT_TENURED, JS_ITERATOR_RESULT_TYPE);

  // Initialize the JSIteratorResult object.
  HValue* native_context = BuildGetNativeContext();
  HValue* map = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::ITERATOR_RESULT_MAP_INDEX));
  Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
  HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
  Add<HStoreNamedField>(result, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
                                    JSIteratorResult::kValueOffset),
                        value);
  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
                                    JSIteratorResult::kDoneOffset),
                        done);
  // Guards that the five stores above cover the whole object.
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  return result;
}
1844
1845
// Allocates a JSRegExpResult of the given |length| with its |index| and
// |input| fields set, plus a fresh FAST_ELEMENTS backing store filled with
// undefined. |length| is bounds-checked against the initial fast-array max.
HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
                                                  HValue* index,
                                                  HValue* input) {
  NoObservableSideEffectsScope scope(this);
  HConstant* max_length = Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  Add<HBoundsCheck>(length, max_length);

  // Generate size calculation code here in order to make it dominate
  // the JSRegExpResult allocation.
  ElementsKind elements_kind = FAST_ELEMENTS;
  HValue* size = BuildCalculateElementsSize(elements_kind, length);

  // Allocate the JSRegExpResult and the FixedArray in one step.
  HValue* result = Add<HAllocate>(
      Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
      NOT_TENURED, JS_ARRAY_TYPE);

  // Initialize the JSRegExpResult header.
  HValue* native_context = Add<HLoadNamedField>(
      context(), nullptr,
      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, nullptr,
          HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);

  // Initialize the additional fields.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
      index);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
      input);

  // Allocate and initialize the elements header.
  HAllocate* elements = BuildAllocateElements(elements_kind, size);
  BuildInitializeElementsHeader(elements, elements_kind, length);

  if (!elements->has_size_upper_bound()) {
    // The bounds check above caps length, so the allocation size is bounded.
    HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize(
        elements_kind, max_length->Integer32Value());
    elements->set_size_upper_bound(size_in_bytes_upper_bound);
  }

  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      elements);

  // Initialize the elements contents with undefined.
  BuildFillElementsWithValue(
      elements, elements_kind, graph()->GetConstant0(), length,
      graph()->GetConstantUndefined());

  return result;
}
1912
1913
// Converts a number to a string via the isolate's number-string cache.
// Constant numbers are folded at compile time. Otherwise the cache — a
// FixedArray holding interleaved (number, string) pairs — is probed with a
// hash derived from the smi value or the heap number's raw bits; a miss
// falls back to Runtime::kNumberToStringSkipCache. |type| narrows deopt
// behavior: SignedSmall inputs deopt when not a smi, Number inputs deopt
// when neither smi nor heap number. The hit paths communicate the matching
// key index to the join block via the environment stack (Push/Pop).
HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  // mask = (length >> 1) - 1, i.e. (number of entries) - 1.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key. Entry i occupies slots 2*i (key) and 2*i+1 (value).
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, nullptr,
                                  nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::SignedSmall())) {
      // Type feedback said smi; anything else is a broken assumption.
      if_objectissmi.Deopt(Deoptimizer::kExpectedSmi);
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash():
        // XOR of the low and high 32 bits of the double, masked.
        HValue* low = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, objectisnumber,
            HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key =
            Add<HLoadKeyed>(number_string_cache, key_index, nullptr, nullptr,
                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if the key is a heap number and compare it with the object.
        IfBuilder if_keyisnotsmi(this);
        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          IfBuilder if_keyisheapnumber(this);
          if_keyisheapnumber.If<HCompareMap>(
              key, isolate()->factory()->heap_number_map());
          if_keyisheapnumber.Then();
          {
            // Check if values of key and object match.
            IfBuilder if_keyeqobject(this);
            if_keyeqobject.If<HCompareNumericAndBranch>(
                Add<HLoadNamedField>(key, keyisnotsmi,
                                     HObjectAccess::ForHeapNumberValue()),
                Add<HLoadNamedField>(object, objectisnumber,
                                     HObjectAccess::ForHeapNumberValue()),
                Token::EQ);
            if_keyeqobject.Then();
            {
              // Make the key_index available.
              Push(key_index);
            }
            if_keyeqobject.JoinContinuation(&found);
          }
          if_keyisheapnumber.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          if_objectisnumber.Deopt(Deoptimizer::kExpectedHeapNumber);
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit: it sits right after the key.
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index, nullptr, nullptr,
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArguments>(object);
    Push(Add<HCallRuntime>(
            Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}
2054
2055
// Implements ToObject(receiver) inline: JSReceivers are returned unchanged;
// primitives are wrapped in a fresh JSValue whose map comes from the
// appropriate wrapper constructor in the native context. Undefined and null
// (which have no constructor function index on their maps) deoptimize. The
// two outer branches push the constructor-function context index and meet
// at the |wrap| continuation, which performs the actual wrapping.
HValue* HGraphBuilder::BuildToObject(HValue* receiver) {
  NoObservableSideEffectsScope scope(this);

  // Create a joinable continuation.
  HIfContinuation wrap(graph()->CreateBasicBlock(),
                       graph()->CreateBasicBlock());

  // Determine the proper global constructor function required to wrap
  // {receiver} into a JSValue, unless {receiver} is already a {JSReceiver}, in
  // which case we just return it. Deopts to Runtime::kToObject if {receiver}
  // is undefined or null.
  IfBuilder receiver_is_smi(this);
  receiver_is_smi.If<HIsSmiAndBranch>(receiver);
  receiver_is_smi.Then();
  {
    // Use global Number function.
    Push(Add<HConstant>(Context::NUMBER_FUNCTION_INDEX));
  }
  receiver_is_smi.Else();
  {
    // Determine {receiver} map and instance type.
    HValue* receiver_map =
        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
    HValue* receiver_instance_type = Add<HLoadNamedField>(
        receiver_map, nullptr, HObjectAccess::ForMapInstanceType());

    // First check whether {receiver} is already a spec object (fast case).
    IfBuilder receiver_is_not_spec_object(this);
    receiver_is_not_spec_object.If<HCompareNumericAndBranch>(
        receiver_instance_type, Add<HConstant>(FIRST_JS_RECEIVER_TYPE),
        Token::LT);
    receiver_is_not_spec_object.Then();
    {
      // Load the constructor function index from the {receiver} map.
      HValue* constructor_function_index = Add<HLoadNamedField>(
          receiver_map, nullptr,
          HObjectAccess::ForMapInObjectPropertiesOrConstructorFunctionIndex());

      // Check if {receiver} has a constructor (null and undefined have no
      // constructors, so we deoptimize to the runtime to throw an exception).
      IfBuilder constructor_function_index_is_invalid(this);
      constructor_function_index_is_invalid.If<HCompareNumericAndBranch>(
          constructor_function_index,
          Add<HConstant>(Map::kNoConstructorFunctionIndex), Token::EQ);
      constructor_function_index_is_invalid.ThenDeopt(
          Deoptimizer::kUndefinedOrNullInToObject);
      constructor_function_index_is_invalid.End();

      // Use the global constructor function.
      Push(constructor_function_index);
    }
    receiver_is_not_spec_object.JoinContinuation(&wrap);
  }
  receiver_is_smi.JoinContinuation(&wrap);

  // Wrap the receiver if necessary.
  IfBuilder if_wrap(this, &wrap);
  if_wrap.Then();
  {
    // Grab the constructor function index (pushed by the branches above).
    HValue* constructor_index = Pop();

    // Load native context.
    HValue* native_context = BuildGetNativeContext();

    // Determine the initial map for the global constructor.
    HValue* constructor = Add<HLoadKeyed>(native_context, constructor_index,
                                          nullptr, nullptr, FAST_ELEMENTS);
    HValue* constructor_initial_map = Add<HLoadNamedField>(
        constructor, nullptr, HObjectAccess::ForPrototypeOrInitialMap());
    // Allocate and initialize a JSValue wrapper: map, empty properties and
    // elements, and the primitive stored in the value slot.
    HValue* value =
        BuildAllocate(Add<HConstant>(JSValue::kSize), HType::JSObject(),
                      JS_VALUE_TYPE, HAllocationMode());
    Add<HStoreNamedField>(value, HObjectAccess::ForMap(),
                          constructor_initial_map);
    HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
    Add<HStoreNamedField>(value, HObjectAccess::ForPropertiesPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForElementsPointer(),
                          empty_fixed_array);
    Add<HStoreNamedField>(value, HObjectAccess::ForObservableJSObjectOffset(
                                     JSValue::kValueOffset),
                          receiver);
    Push(value);
  }
  if_wrap.Else();
  { Push(receiver); }
  if_wrap.End();
  return Pop();
}
2147
2148
2149HAllocate* HGraphBuilder::BuildAllocate(
2150 HValue* object_size,
2151 HType type,
2152 InstanceType instance_type,
2153 HAllocationMode allocation_mode) {
2154 // Compute the effective allocation size.
2155 HValue* size = object_size;
2156 if (allocation_mode.CreateAllocationMementos()) {
2157 size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
2158 size->ClearFlag(HValue::kCanOverflow);
2159 }
2160
2161 // Perform the actual allocation.
2162 HAllocate* object = Add<HAllocate>(
2163 size, type, allocation_mode.GetPretenureMode(),
2164 instance_type, allocation_mode.feedback_site());
2165
2166 // Setup the allocation memento.
2167 if (allocation_mode.CreateAllocationMementos()) {
2168 BuildCreateAllocationMemento(
2169 object, object_size, allocation_mode.current_site());
2170 }
2171
2172 return object;
2173}
2174
2175
2176HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
2177 HValue* right_length) {
2178 // Compute the combined string length and check against max string length.
2179 HValue* length = AddUncasted<HAdd>(left_length, right_length);
2180 // Check that length <= kMaxLength <=> length < MaxLength + 1.
2181 HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
2182 Add<HBoundsCheck>(length, max_length);
2183 return length;
2184}
2185
2186
// Allocates and initializes a ConsString of |length| characters referencing
// |left| and |right|. The only subtlety is selecting the one-byte vs
// two-byte cons string map, decided purely from the operands' instance-type
// bits (string encoding and the one-byte data hint) — see the bit logic
// comments below.
HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ONE_BYTE_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  // AND: bits set in both operands; XOR: bits set in exactly one.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  // Condition 1: encoding bit or data-hint bit present in BOTH operands.
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  // Condition 2: one operand contributes the encoding bit, the other the
  // data-hint bit (XOR has both set exactly when they come from different
  // operands, which the tag inequalities asserted above make unambiguous).
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
                  kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_string_map()));
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}
2269
2270
2271void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
2272 HValue* src_offset,
2273 String::Encoding src_encoding,
2274 HValue* dst,
2275 HValue* dst_offset,
2276 String::Encoding dst_encoding,
2277 HValue* length) {
2278 DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
2279 src_encoding == String::ONE_BYTE_ENCODING);
2280 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
2281 HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
2282 {
2283 HValue* src_index = AddUncasted<HAdd>(src_offset, index);
2284 HValue* value =
2285 AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
2286 HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
2287 Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
2288 }
2289 loop.EndBody();
2290}
2291
2292
2293HValue* HGraphBuilder::BuildObjectSizeAlignment(
2294 HValue* unaligned_size, int header_size) {
2295 DCHECK((header_size & kObjectAlignmentMask) == 0);
2296 HValue* size = AddUncasted<HAdd>(
2297 unaligned_size, Add<HConstant>(static_cast<int32_t>(
2298 header_size + kObjectAlignmentMask)));
2299 size->ClearFlag(HValue::kCanOverflow);
2300 return AddUncasted<HBitwise>(
2301 Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
2302 ~kObjectAlignmentMask)));
2303}
2304
2305
// Concatenates two strings that are both known to be non-empty (callers —
// see BuildStringAdd — have already short-circuited the empty cases; the
// DCHECKs below rely on this). Produces a ConsString when the combined
// length reaches ConsString::kMinLength, otherwise a flat sequential string
// built by copying both operands' characters; oversized or mixed-encoding
// cases fall back to Runtime::kStringAdd. The result is threaded to the
// join points via the environment stack (Push/Pop).
HValue* HGraphBuilder::BuildUncheckedStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Compute the combined string length (bounds-checked vs kMaxLength).
  HValue* length = BuildAddStringLengths(left_length, right_length);

  // Do some manual constant folding here.
  if (left_length->IsConstant()) {
    HConstant* c_left_length = HConstant::cast(left_length);
    DCHECK_NE(0, c_left_length->Integer32Value());
    if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The right string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  } else if (right_length->IsConstant()) {
    HConstant* c_right_length = HConstant::cast(right_length);
    DCHECK_NE(0, c_right_length->Integer32Value());
    if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The left string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  }

  // Check if we should create a cons string.
  IfBuilder if_createcons(this);
  if_createcons.If<HCompareNumericAndBranch>(
      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
  if_createcons.Then();
  {
    // Create a cons string.
    Push(BuildCreateConsString(length, left, right, allocation_mode));
  }
  if_createcons.Else();
  {
    // Determine the string instance types.
    HValue* left_instance_type = AddLoadStringInstanceType(left);
    HValue* right_instance_type = AddLoadStringInstanceType(right);

    // Compute union and difference of instance types.
    HValue* ored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_OR, left_instance_type, right_instance_type);
    HValue* xored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_XOR, left_instance_type, right_instance_type);

    // Check if both strings have the same encoding (encoding bit does not
    // differ, i.e. XOR masks to zero) and both are sequential
    // (representation bits of the union are zero, given kSeqStringTag == 0).
    IfBuilder if_sameencodingandsequential(this);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, xored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.And();
    STATIC_ASSERT(kSeqStringTag == 0);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, ored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.Then();
    {
      HConstant* string_map =
          Add<HConstant>(isolate()->factory()->string_map());
      HConstant* one_byte_string_map =
          Add<HConstant>(isolate()->factory()->one_byte_string_map());

      // Determine map and size depending on whether result is one-byte
      // string. Each arm pushes (character data size, map); the one-byte
      // size equals the length, the two-byte size is length << 1.
      IfBuilder if_onebyte(this);
      STATIC_ASSERT(kOneByteStringTag != 0);
      if_onebyte.If<HCompareNumericAndBranch>(
          AddUncasted<HBitwise>(
              Token::BIT_AND, ored_instance_types,
              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
          graph()->GetConstant0(), Token::NE);
      if_onebyte.Then();
      {
        // Allocate sequential one-byte string object.
        Push(length);
        Push(one_byte_string_map);
      }
      if_onebyte.Else();
      {
        // Allocate sequential two-byte string object.
        HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
        size->ClearFlag(HValue::kCanOverflow);
        size->SetFlag(HValue::kUint32);
        Push(size);
        Push(string_map);
      }
      if_onebyte.End();
      HValue* map = Pop();

      // Calculate the number of bytes needed for the characters in the
      // string while observing object alignment.
      STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
      HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);

      // Only regular-sized objects can be allocated inline; larger ones
      // must go to the runtime (LO space).
      IfBuilder if_size(this);
      if_size.If<HCompareNumericAndBranch>(
          size, Add<HConstant>(Page::kMaxRegularHeapObjectSize), Token::LT);
      if_size.Then();
      {
        // Allocate the string object. HAllocate does not care whether we pass
        // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
        HAllocate* result =
            BuildAllocate(size, HType::String(), STRING_TYPE, allocation_mode);
        Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);

        // Initialize the string fields.
        Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                              Add<HConstant>(String::kEmptyHashField));
        Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);

        // Copy characters to the result string.
        IfBuilder if_twobyte(this);
        if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
        if_twobyte.Then();
        {
          // Copy characters from the left string.
          BuildCopySeqStringChars(
              left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
              graph()->GetConstant0(), String::TWO_BYTE_ENCODING, left_length);

          // Copy characters from the right string.
          BuildCopySeqStringChars(
              right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
              left_length, String::TWO_BYTE_ENCODING, right_length);
        }
        if_twobyte.Else();
        {
          // Copy characters from the left string.
          BuildCopySeqStringChars(
              left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
              graph()->GetConstant0(), String::ONE_BYTE_ENCODING, left_length);

          // Copy characters from the right string.
          BuildCopySeqStringChars(
              right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
              left_length, String::ONE_BYTE_ENCODING, right_length);
        }
        if_twobyte.End();

        // Count the native string addition.
        AddIncrementCounter(isolate()->counters()->string_add_native());

        // Return the sequential string.
        Push(result);
      }
      if_size.Else();
      {
        // Fallback to the runtime to add the two strings. The string has to be
        // allocated in LO space.
        Add<HPushArguments>(left, right);
        Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
      }
      if_size.End();
    }
    if_sameencodingandsequential.Else();
    {
      // Fallback to the runtime to add the two strings.
      Add<HPushArguments>(left, right);
      Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
    }
    if_sameencodingandsequential.End();
  }
  if_createcons.End();

  return Pop();
}
2480
2481
2482HValue* HGraphBuilder::BuildStringAdd(
2483 HValue* left,
2484 HValue* right,
2485 HAllocationMode allocation_mode) {
2486 NoObservableSideEffectsScope no_effects(this);
2487
2488 // Determine string lengths.
2489 HValue* left_length = AddLoadStringLength(left);
2490 HValue* right_length = AddLoadStringLength(right);
2491
2492 // Check if left string is empty.
2493 IfBuilder if_leftempty(this);
2494 if_leftempty.If<HCompareNumericAndBranch>(
2495 left_length, graph()->GetConstant0(), Token::EQ);
2496 if_leftempty.Then();
2497 {
2498 // Count the native string addition.
2499 AddIncrementCounter(isolate()->counters()->string_add_native());
2500
2501 // Just return the right string.
2502 Push(right);
2503 }
2504 if_leftempty.Else();
2505 {
2506 // Check if right string is empty.
2507 IfBuilder if_rightempty(this);
2508 if_rightempty.If<HCompareNumericAndBranch>(
2509 right_length, graph()->GetConstant0(), Token::EQ);
2510 if_rightempty.Then();
2511 {
2512 // Count the native string addition.
2513 AddIncrementCounter(isolate()->counters()->string_add_native());
2514
2515 // Just return the left string.
2516 Push(left);
2517 }
2518 if_rightempty.Else();
2519 {
2520 // Add the two non-empty strings.
2521 Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2522 }
2523 if_rightempty.End();
2524 }
2525 if_leftempty.End();
2526
2527 return Pop();
2528}
2529
2530
// Emits a monomorphic keyed element load/store on |checked_object|, whose
// map has already been checked (CompareMap/CheckMaps, or we are in a stub).
// Covers fixed typed arrays (including the ignore-out-of-bounds store
// mode), fast smi/object/double elements with COW handling, and the
// grow-on-store modes. Returns the element access instruction.
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  DCHECK(top_info()->IsStub() || checked_object->IsCompareMap() ||
         checked_object->IsCheckMaps());
  DCHECK(!IsFixedTypedArrayElementsKind(elements_kind) || !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on a HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for stores
  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
  // generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  // Stores into fast elements must not write into a copy-on-write backing
  // store; the map check below guarantees a plain FixedArray (unless the
  // COW case is handled explicitly further down).
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  // JSArrays carry their own length; other holders use the backing store's.
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(
        checked_object->ActualValue(), checked_object,
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsFixedTypedArrayElementsKind(elements_kind)) {
    // Typed array path: guard against a neutered ArrayBuffer, then compute
    // the backing store address from external + base pointer.
    checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);

    HValue* external_pointer = Add<HLoadNamedField>(
        elements, nullptr,
        HObjectAccess::ForFixedTypedArrayBaseExternalPointer());
    HValue* base_pointer = Add<HLoadNamedField>(
        elements, nullptr, HObjectAccess::ForFixedTypedArrayBaseBasePointer());
    HValue* backing_store = AddUncasted<HAdd>(
        external_pointer, base_pointer, Strength::WEAK, AddOfExternalAndTagged);

    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Out-of-bounds keys are silently ignored (per typed array store
      // semantics); negative keys still deoptimize.
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, checked_object->ActualValue(),
          elements_kind, access_type);
      negative_checker.ElseDeopt(Deoptimizer::kNegativeKeyEncountered);
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      DCHECK(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(backing_store, checked_key, val, checked_object,
                              checked_object->ActualValue(), elements_kind,
                              access_type);
    }
  }
  DCHECK(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a smi
  // before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    // Grow mode: the backing store may need to be enlarged before the
    // store; the key is validated inside BuildCheckForCapacityGrow.
    NoObservableSideEffectsScope no_effects(this);
    Representation representation = HStoreKeyed::RequiredValueRepresentation(
        elements_kind, STORE_TO_INITIALIZED_ENTRY);
    val = AddUncasted<HForceRepresentation>(val, representation);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        // Copy-on-write backing store: clone it before the first write.
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object, nullptr,
                          elements_kind, access_type, load_mode);
}
2647
2648
// Allocates a JSArray whose length comes from |length_argument|. Constant
// smi lengths take a static path. Otherwise the length is bounds-checked
// against the fast-elements limit, and a runtime zero-length check chooses
// between a preallocated default capacity (for length 0) and
// capacity == length; packed-kind non-stub code instead deopts on nonzero
// lengths to collect holey feedback. Capacity and length are threaded
// through the branches via Push/Pop.
HValue* HGraphBuilder::BuildAllocateArrayFromLength(
    JSArrayBuilder* array_builder,
    HValue* length_argument) {
  // Static path: length known at compile time.
  if (length_argument->IsConstant() &&
      HConstant::cast(length_argument)->HasSmiValue()) {
    int array_length = HConstant::cast(length_argument)->Integer32Value();
    if (array_length == 0) {
      return array_builder->AllocateEmptyArray();
    } else {
      return array_builder->AllocateArray(length_argument,
                                          array_length,
                                          length_argument);
    }
  }

  HValue* constant_zero = graph()->GetConstant0();
  HConstant* max_alloc_length =
      Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  // Deopt (via bounds check) for lengths beyond the fast-elements limit.
  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
                                                   max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);          // length
  if_builder.Else();
  if (!(top_info()->IsStub()) &&
      IsFastPackedElementsKind(array_builder->kind())) {
    // We'll come back later with better (holey) feedback.
    if_builder.Deopt(
        Deoptimizer::kHoleyArrayDespitePackedElements_kindFeedback);
  } else {
    Push(checked_length);  // capacity
    Push(checked_length);  // length
  }
  if_builder.End();

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, max_alloc_length, length);
}
2694
2695
2696HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
2697 HValue* capacity) {
2698 int elements_size = IsFastDoubleElementsKind(kind)
2699 ? kDoubleSize
2700 : kPointerSize;
2701
2702 HConstant* elements_size_value = Add<HConstant>(elements_size);
2703 HInstruction* mul =
2704 HMul::NewImul(isolate(), zone(), context(), capacity->ActualValue(),
2705 elements_size_value);
2706 AddInstruction(mul);
2707 mul->ClearFlag(HValue::kCanOverflow);
2708
2709 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
2710
2711 HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2712 HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2713 total_size->ClearFlag(HValue::kCanOverflow);
2714 return total_size;
2715}
2716
2717
2718HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
2719 int base_size = JSArray::kSize;
2720 if (mode == TRACK_ALLOCATION_SITE) {
2721 base_size += AllocationMemento::kSize;
2722 }
2723 HConstant* size_in_bytes = Add<HConstant>(base_size);
2724 return Add<HAllocate>(
2725 size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE);
2726}
2727
2728
2729HConstant* HGraphBuilder::EstablishElementsAllocationSize(
2730 ElementsKind kind,
2731 int capacity) {
2732 int base_size = IsFastDoubleElementsKind(kind)
2733 ? FixedDoubleArray::SizeFor(capacity)
2734 : FixedArray::SizeFor(capacity);
2735
2736 return Add<HConstant>(base_size);
2737}
2738
2739
2740HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2741 HValue* size_in_bytes) {
2742 InstanceType instance_type = IsFastDoubleElementsKind(kind)
2743 ? FIXED_DOUBLE_ARRAY_TYPE
2744 : FIXED_ARRAY_TYPE;
2745
2746 return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
2747 instance_type);
2748}
2749
2750
2751void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2752 ElementsKind kind,
2753 HValue* capacity) {
2754 Factory* factory = isolate()->factory();
2755 Handle<Map> map = IsFastDoubleElementsKind(kind)
2756 ? factory->fixed_double_array_map()
2757 : factory->fixed_array_map();
2758
2759 Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
2760 Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2761 capacity);
2762}
2763
2764
2765HValue* HGraphBuilder::BuildAllocateAndInitializeArray(ElementsKind kind,
2766 HValue* capacity) {
2767 // The HForceRepresentation is to prevent possible deopt on int-smi
2768 // conversion after allocation but before the new object fields are set.
2769 capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2770 HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
2771 HValue* new_array = BuildAllocateElements(kind, size_in_bytes);
2772 BuildInitializeElementsHeader(new_array, kind, capacity);
2773 return new_array;
2774}
2775
2776
// Fills in the header of a freshly allocated JSArray: map, properties
// (always the canonical empty fixed array), elements pointer, length, and -
// when |mode| == TRACK_ALLOCATION_SITE - an AllocationMemento placed
// directly behind the array referencing |allocation_site_payload|.
// |elements| may be NULL, in which case the empty fixed array is installed.
void HGraphBuilder::BuildJSArrayHeader(HValue* array,
                                       HValue* array_map,
                                       HValue* elements,
                                       AllocationSiteMode mode,
                                       ElementsKind elements_kind,
                                       HValue* allocation_site_payload,
                                       HValue* length_field) {
  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);

  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());

  Add<HStoreNamedField>(
      array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForElementsPointer(),
      elements != NULL ? elements : empty_fixed_array);

  // The length accessor depends on the elements kind (Smi vs. double arrays).
  Add<HStoreNamedField>(
      array, HObjectAccess::ForArrayLength(elements_kind), length_field);

  if (mode == TRACK_ALLOCATION_SITE) {
    BuildCreateAllocationMemento(
        array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
  }
}
2804
2805
// Emits the keyed load or store for an already bounds-checked element
// access. For stores, |val| must be non-NULL and is clamped for
// UINT8_CLAMPED_ELEMENTS; for loads, |val| must be NULL and the resulting
// HLoadKeyed is returned (uint32 loads are recorded for later analysis).
HInstruction* HGraphBuilder::AddElementAccess(
    HValue* elements, HValue* checked_key, HValue* val, HValue* dependency,
    HValue* backing_store_owner, ElementsKind elements_kind,
    PropertyAccessType access_type, LoadKeyedHoleMode load_mode) {
  if (access_type == STORE) {
    DCHECK(val != NULL);
    if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
      // Clamped byte arrays store values saturated into [0, 255].
      val = Add<HClampToUint8>(val);
    }
    return Add<HStoreKeyed>(elements, checked_key, val, backing_store_owner,
                            elements_kind, STORE_TO_INITIALIZED_ENTRY);
  }

  DCHECK(access_type == LOAD);
  DCHECK(val == NULL);
  HLoadKeyed* load =
      Add<HLoadKeyed>(elements, checked_key, dependency, backing_store_owner,
                      elements_kind, load_mode);
  if (elements_kind == UINT32_ELEMENTS) {
    // uint32 results may not fit in int32; record for the uint32 analysis.
    graph()->RecordUint32Instruction(load);
  }
  return load;
}
2829
2830
2831HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
2832 HValue* dependency) {
2833 return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
2834}
2835
2836
2837HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
2838 HValue* dependency) {
2839 return Add<HLoadNamedField>(
2840 object, dependency, HObjectAccess::ForElementsPointer());
2841}
2842
2843
2844HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
2845 HValue* array,
2846 HValue* dependency) {
2847 return Add<HLoadNamedField>(
2848 array, dependency, HObjectAccess::ForFixedArrayLength());
2849}
2850
2851
2852HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
2853 ElementsKind kind,
2854 HValue* dependency) {
2855 return Add<HLoadNamedField>(
2856 array, dependency, HObjectAccess::ForArrayLength(kind));
2857}
2858
2859
2860HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2861 HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2862 graph_->GetConstant1());
2863
2864 HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2865 new_capacity->ClearFlag(HValue::kCanOverflow);
2866
2867 HValue* min_growth = Add<HConstant>(16);
2868
2869 new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2870 new_capacity->ClearFlag(HValue::kCanOverflow);
2871
2872 return new_capacity;
2873}
2874
2875
// Grows |object|'s backing store to |new_capacity| elements of |new_kind|:
// bounds-checks the capacity against the maximum regular-heap-object size,
// allocates and initializes a new store, copies the first |length| elements
// over (possibly transitioning kinds), and installs the new store on
// |object|. Returns the new backing store.
HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
                                                 HValue* elements,
                                                 ElementsKind kind,
                                                 ElementsKind new_kind,
                                                 HValue* length,
                                                 HValue* new_capacity) {
  // Deopt if the grown store would not fit in a regular heap page.
  Add<HBoundsCheck>(new_capacity, Add<HConstant>(
          (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
          ElementsKindToShiftSize(new_kind)));

  HValue* new_elements =
      BuildAllocateAndInitializeArray(new_kind, new_capacity);

  BuildCopyElements(elements, kind, new_elements,
                    new_kind, length, new_capacity);

  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        new_elements);

  return new_elements;
}
2897
2898
// Stores |value| into |elements| at every index in [from, to). When |to| is
// NULL the store's length field is used. Small constant ranges starting at 0
// are fully unrolled; everything else uses a backwards-running loop.
void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to,
                                               HValue* value) {
  if (to == NULL) {
    to = AddLoadFixedArrayLength(elements);
  }

  // Special loop unfolding case
  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
                kElementLoopUnrollThreshold);
  int initial_capacity = -1;
  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
    int constant_from = from->GetInteger32Constant();
    int constant_to = to->GetInteger32Constant();

    if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
      initial_capacity = constant_to;
    }
  }

  if (initial_capacity >= 0) {
    // Unrolled fill: one constant-indexed store per element.
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, value, nullptr, elements_kind);
    }
  } else {
    // Carefully loop backwards so that "from" remains live through the loop
    // rather than "to". This often corresponds to keeping length live rather
    // than capacity, which helps register allocation, since length is used
    // more often than capacity after filling with holes.
    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(to, from, Token::GT);

    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
    adjusted_key->ClearFlag(HValue::kCanOverflow);

    Add<HStoreKeyed>(elements, adjusted_key, value, nullptr, elements_kind);

    builder.EndBody();
  }
}
2943
2944
2945void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2946 ElementsKind elements_kind,
2947 HValue* from,
2948 HValue* to) {
2949 // Fast elements kinds need to be initialized in case statements below cause a
2950 // garbage collection.
2951
2952 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2953 ? graph()->GetConstantHole()
2954 : Add<HConstant>(HConstant::kHoleNaN);
2955
2956 // Since we're about to store a hole value, the store instruction below must
2957 // assume an elements kind that supports heap object values.
2958 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2959 elements_kind = FAST_HOLEY_ELEMENTS;
2960 }
2961
2962 BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
2963}
2964
2965
// Copies the first |length| property slots from |from_properties| to
// |to_properties| and fills the remainder [length, capacity) with undefined.
// The fill happens first so the destination is fully initialized before the
// backwards-running copy loop.
void HGraphBuilder::BuildCopyProperties(HValue* from_properties,
                                        HValue* to_properties, HValue* length,
                                        HValue* capacity) {
  ElementsKind kind = FAST_ELEMENTS;

  BuildFillElementsWithValue(to_properties, kind, length, capacity,
                             graph()->GetConstantUndefined());

  // Copy backwards from length-1 down to 0.
  LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

  HValue* key = builder.BeginBody(length, graph()->GetConstant0(), Token::GT);

  key = AddUncasted<HSub>(key, graph()->GetConstant1());
  key->ClearFlag(HValue::kCanOverflow);

  HValue* element =
      Add<HLoadKeyed>(from_properties, key, nullptr, nullptr, kind);

  Add<HStoreKeyed>(to_properties, key, element, nullptr, kind);

  builder.EndBody();
}
2988
2989
// Copies |length| elements from |from_elements| (of |from_elements_kind|)
// into |to_elements| (of |to_elements_kind|), handling hole translation
// between double and tagged representations. Small constant capacities are
// unrolled; otherwise a backwards loop is used and any uncopied tail is
// pre-filled with holes so the destination stays GC-consistent. |capacity|
// may be NULL when unknown.
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  int constant_capacity = -1;
  if (capacity != NULL &&
      capacity->IsConstant() &&
      HConstant::cast(capacity)->HasInteger32Value()) {
    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
    if (constant_candidate <= kElementLoopUnrollThreshold) {
      constant_capacity = constant_candidate;
    }
  }

  // Copying doubles into a tagged array materializes heap numbers, which can
  // trigger GC mid-copy.
  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);
  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), NULL);
  }

  if (constant_capacity != -1) {
    // Unroll the loop for small elements kinds.
    for (int i = 0; i < constant_capacity; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(
          from_elements, key_constant, nullptr, nullptr, from_elements_kind);
      Add<HStoreKeyed>(to_elements, key_constant, value, nullptr,
                       to_elements_kind);
    }
  } else {
    // Initialize the tail [length, capacity) unless the whole store was
    // already hole-filled above or length provably equals capacity.
    if (!pre_fill_with_holes &&
        (capacity == NULL || !length->Equals(capacity))) {
      BuildFillElementsWithHole(to_elements, to_elements_kind,
                                length, NULL);
    }

    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);

    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
                                    Token::GT);

    key = AddUncasted<HSub>(key, graph()->GetConstant1());
    key->ClearFlag(HValue::kCanOverflow);

    HValue* element = Add<HLoadKeyed>(from_elements, key, nullptr, nullptr,
                                      from_elements_kind, ALLOW_RETURN_HOLE);

    // Holes loaded from a holey source can't be stored through a Smi-kind
    // store, so widen the store kind in that case.
    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                         IsFastSmiElementsKind(to_elements_kind))
        ? FAST_HOLEY_ELEMENTS : to_elements_kind;

    if (IsHoleyElementsKind(from_elements_kind) &&
        from_elements_kind != to_elements_kind) {
      // The hole's representation differs between kinds; detect it and store
      // the destination kind's own hole value instead.
      IfBuilder if_hole(this);
      if_hole.If<HCompareHoleAndBranch>(element);
      if_hole.Then();
      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
          ? Add<HConstant>(HConstant::kHoleNaN)
          : graph()->GetConstantHole();
      Add<HStoreKeyed>(to_elements, key, hole_constant, nullptr, kind);
      if_hole.Else();
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
      if_hole.End();
    } else {
      HStoreKeyed* store =
          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
      store->SetFlag(HValue::kAllowUndefinedAsNaN);
    }

    builder.EndBody();
  }

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->inlined_copied_elements());
}
3074
3075
3076HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
3077 HValue* allocation_site,
3078 AllocationSiteMode mode,
3079 ElementsKind kind) {
3080 HAllocate* array = AllocateJSArrayObject(mode);
3081
3082 HValue* map = AddLoadMap(boilerplate);
3083 HValue* elements = AddLoadElements(boilerplate);
3084 HValue* length = AddLoadArrayLength(boilerplate, kind);
3085
3086 BuildJSArrayHeader(array,
3087 map,
3088 elements,
3089 mode,
3090 FAST_ELEMENTS,
3091 allocation_site,
3092 length);
3093 return array;
3094}
3095
3096
3097HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
3098 HValue* allocation_site,
3099 AllocationSiteMode mode) {
3100 HAllocate* array = AllocateJSArrayObject(mode);
3101
3102 HValue* map = AddLoadMap(boilerplate);
3103
3104 BuildJSArrayHeader(array,
3105 map,
3106 NULL, // set elements to empty fixed array
3107 mode,
3108 FAST_ELEMENTS,
3109 allocation_site,
3110 graph()->GetConstant0());
3111 return array;
3112}
3113
3114
// Clones a boilerplate array together with a fresh copy of its backing
// store. The JSArray is first created "empty" (fully GC-safe), then a new
// elements store is allocated, its header copied from the boilerplate, the
// length installed, and finally the element payload is copied over.
HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
                                                      HValue* allocation_site,
                                                      AllocationSiteMode mode,
                                                      ElementsKind kind) {
  HValue* boilerplate_elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);

  // Create empty JSArray object for now, store elimination should remove
  // redundant initialization of elements and length fields and at the same
  // time the object will be fully prepared for GC if it happens during
  // elements allocation.
  HValue* result = BuildCloneShallowArrayEmpty(
      boilerplate, allocation_site, mode);

  HAllocate* elements = BuildAllocateElements(kind, elements_size);

  // This function implicitly relies on the fact that the
  // FastCloneShallowArrayStub is called only for literals shorter than
  // JSArray::kInitialMaxFastElementArray.
  // Can't add HBoundsCheck here because otherwise the stub would need to
  // build an eager frame.
  HConstant* size_upper_bound = EstablishElementsAllocationSize(
      kind, JSArray::kInitialMaxFastElementArray);
  elements->set_size_upper_bound(size_upper_bound);

  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);

  // The allocation for the cloned array above causes register pressure on
  // machines with low register counts. Force a reload of the boilerplate
  // elements here to free up a register for the allocation to avoid unnecessary
  // spillage.
  boilerplate_elements = AddLoadElements(boilerplate);
  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);

  // Copy the elements array header.
  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
    Add<HStoreNamedField>(
        elements, access,
        Add<HLoadNamedField>(boilerplate_elements, nullptr, access));
  }

  // And the result of the length
  HValue* length = AddLoadArrayLength(boilerplate, kind);
  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);

  BuildCopyElements(boilerplate_elements, kind, elements,
                    kind, length, NULL);
  return result;
}
3168
3169
// Builds a null/undefined/undetectable comparison for |value| against what
// |type| says is possible, capturing the outcome in |continuation|. Each
// possible nil-like case adds an Or'd branch condition; if the type admits
// other cases as well, the "else" path is guarded by a map check (embedded
// directly or via a weak cell, per |map_embedding|) or a deopt.
void HGraphBuilder::BuildCompareNil(HValue* value, Type* type,
                                    HIfContinuation* continuation,
                                    MapEmbedding map_embedding) {
  IfBuilder if_nil(this);
  bool some_case_handled = false;
  bool some_case_missing = false;

  if (type->Maybe(Type::Null())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undefined())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value,
                                         graph()->GetConstantUndefined());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undetectable())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HIsUndetectableAndBranch>(value);
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (some_case_missing) {
    if_nil.Then();
    if_nil.Else();
    if (type->NumClasses() == 1) {
      BuildCheckHeapObject(value);
      // For ICs, the map checked below is a sentinel map that gets replaced by
      // the monomorphic map when the code is used as a template to generate a
      // new IC. For optimized functions, there is no sentinel map, the map
      // emitted below is the actual monomorphic map.
      if (map_embedding == kEmbedMapsViaWeakCells) {
        HValue* cell =
            Add<HConstant>(Map::WeakCellForMap(type->Classes().Current()));
        HValue* expected_map = Add<HLoadNamedField>(
            cell, nullptr, HObjectAccess::ForWeakCellValue());
        HValue* map =
            Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
        IfBuilder map_check(this);
        map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
        map_check.ThenDeopt(Deoptimizer::kUnknownMap);
        map_check.End();
      } else {
        DCHECK(map_embedding == kEmbedMapsDirectly);
        Add<HCheckMaps>(value, type->Classes().Current());
      }
    } else {
      if_nil.Deopt(Deoptimizer::kTooManyUndetectableTypes);
    }
  }

  if_nil.CaptureContinuation(continuation);
}
3233
3234
// Emits an AllocationMemento immediately behind |previous_object| (which
// must have been allocated with |previous_object_size| bytes of slack for
// it): installs the memento map and a pointer to |allocation_site|, and -
// when pretenuring feedback is enabled - bumps the site's create count.
void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  DCHECK(allocation_site != NULL);
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size, HType::HeapObject());
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    HValue* memento_create_count =
        Add<HLoadNamedField>(allocation_site, nullptr,
                             HObjectAccess::ForAllocationSiteOffset(
                                 AllocationSite::kPretenureCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    // This smi value is reset to zero after every gc, overflow isn't a problem
    // since the counter is bounded by the new space size.
    memento_create_count->ClearFlag(HValue::kCanOverflow);
    Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kPretenureCreateCountOffset), memento_create_count);
  }
}
3263
3264
3265HInstruction* HGraphBuilder::BuildGetNativeContext() {
3266 return Add<HLoadNamedField>(
3267 context(), nullptr,
3268 HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
3269}
3270
3271
3272HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
3273 // Get the global object, then the native context
3274 HInstruction* context = Add<HLoadNamedField>(
3275 closure, nullptr, HObjectAccess::ForFunctionContextPointer());
3276 return Add<HLoadNamedField>(
3277 context, nullptr,
3278 HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
3279}
3280
3281
3282HInstruction* HGraphBuilder::BuildGetScriptContext(int context_index) {
3283 HValue* native_context = BuildGetNativeContext();
3284 HValue* script_context_table = Add<HLoadNamedField>(
3285 native_context, nullptr,
3286 HObjectAccess::ForContextSlot(Context::SCRIPT_CONTEXT_TABLE_INDEX));
3287 return Add<HLoadNamedField>(script_context_table, nullptr,
3288 HObjectAccess::ForScriptContext(context_index));
3289}
3290
3291
// Walks the context chain upwards. When |depth| is a runtime value a loop
// decrements it while following PREVIOUS_INDEX links; when it is NULL the
// walk is unrolled |depth_value| times. Returns the reached context (the
// current context when both depth inputs indicate zero).
HValue* HGraphBuilder::BuildGetParentContext(HValue* depth, int depth_value) {
  HValue* script_context = context();
  if (depth != NULL) {
    HValue* zero = graph()->GetConstant0();

    // The loop carries (script_context, depth) on the environment stack.
    Push(script_context);
    Push(depth);

    LoopBuilder loop(this);
    loop.BeginBody(2);  // Drop script_context and depth from last environment
                        // to appease live range building without simulates.
    depth = Pop();
    script_context = Pop();

    script_context = Add<HLoadNamedField>(
        script_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    depth = AddUncasted<HSub>(depth, graph()->GetConstant1());
    depth->ClearFlag(HValue::kCanOverflow);

    IfBuilder if_break(this);
    if_break.If<HCompareNumericAndBranch, HValue*>(depth, zero, Token::EQ);
    if_break.Then();
    {
      Push(script_context);  // The result.
      loop.Break();
    }
    if_break.Else();
    {
      Push(script_context);
      Push(depth);
    }
    loop.EndBody();
    if_break.End();

    script_context = Pop();
  } else if (depth_value > 0) {
    // Unroll the above loop.
    for (int i = 0; i < depth_value; i++) {
      script_context = Add<HLoadNamedField>(
          script_context, nullptr,
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
    }
  }
  return script_context;
}
3338
3339
3340HInstruction* HGraphBuilder::BuildGetArrayFunction() {
3341 HInstruction* native_context = BuildGetNativeContext();
3342 HInstruction* index =
3343 Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
3344 return Add<HLoadKeyed>(native_context, index, nullptr, nullptr,
3345 FAST_ELEMENTS);
3346}
3347
3348
// Loads a tagged field at |index| of an array-buffer view, guarding against
// a neutered (detached) underlying buffer: when the buffer's WasNeutered bit
// is set, the constant 0 is returned instead of the stale field value.
HValue* HGraphBuilder::BuildArrayBufferViewFieldAccessor(HValue* object,
                                                         HValue* checked_object,
                                                         FieldIndex index) {
  NoObservableSideEffectsScope scope(this);
  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
      index.offset(), Representation::Tagged());
  HInstruction* buffer = Add<HLoadNamedField>(
      object, checked_object, HObjectAccess::ForJSArrayBufferViewBuffer());
  HInstruction* field = Add<HLoadNamedField>(object, checked_object, access);

  // Test the buffer's WasNeutered bit field.
  HInstruction* flags = Add<HLoadNamedField>(
      buffer, nullptr, HObjectAccess::ForJSArrayBufferBitField());
  HValue* was_neutered_mask =
      Add<HConstant>(1 << JSArrayBuffer::WasNeutered::kShift);
  HValue* was_neutered_test =
      AddUncasted<HBitwise>(Token::BIT_AND, flags, was_neutered_mask);

  IfBuilder if_was_neutered(this);
  if_was_neutered.If<HCompareNumericAndBranch>(
      was_neutered_test, graph()->GetConstant0(), Token::NE);
  if_was_neutered.Then();
  Push(graph()->GetConstant0());
  if_was_neutered.Else();
  Push(field);
  if_was_neutered.End();

  return Pop();
}
3377
3378
3379HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
3380 ElementsKind kind,
3381 HValue* allocation_site_payload,
3382 HValue* constructor_function,
3383 AllocationSiteOverrideMode override_mode) :
3384 builder_(builder),
3385 kind_(kind),
3386 allocation_site_payload_(allocation_site_payload),
3387 constructor_function_(constructor_function) {
3388 DCHECK(!allocation_site_payload->IsConstant() ||
3389 HConstant::cast(allocation_site_payload)->handle(
3390 builder_->isolate())->IsAllocationSite());
3391 mode_ = override_mode == DISABLE_ALLOCATION_SITES
3392 ? DONT_TRACK_ALLOCATION_SITE
3393 : AllocationSite::GetMode(kind);
3394}
3395
3396
// Builder variant without allocation-site feedback: no memento is emitted
// (DONT_TRACK_ALLOCATION_SITE) and the array map comes from
// |constructor_function| rather than from a site payload.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                              ElementsKind kind,
                                              HValue* constructor_function) :
    builder_(builder),
    kind_(kind),
    mode_(DONT_TRACK_ALLOCATION_SITE),
    allocation_site_payload_(NULL),
    constructor_function_(constructor_function) {
}
3406
3407
// Emits the load (or constant) of the array map for kind_. Optimized code
// can embed the initial JSArray map directly; stubs must load it from the
// constructor function's initial map or from the native context.
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
                                           access);
  }

  // TODO(mvstanton): we should always have a constructor function if we
  // are creating a stub.
  HInstruction* native_context = constructor_function_ != NULL
      ? builder()->BuildGetNativeContext(constructor_function_)
      : builder()->BuildGetNativeContext();

  HObjectAccess access =
      HObjectAccess::ForContextSlot(Context::ArrayMapIndex(kind_));
  return builder()->Add<HLoadNamedField>(native_context, nullptr, access);
}
3434
3435
3436HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
3437 // Find the map near the constructor function
3438 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
3439 return builder()->Add<HLoadNamedField>(constructor_function_, nullptr,
3440 access);
3441}
3442
3443
3444HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
3445 HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
3446 return AllocateArray(capacity,
3447 capacity,
3448 builder()->graph()->GetConstant0());
3449}
3450
3451
3452HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
3453 HValue* capacity,
3454 HConstant* capacity_upper_bound,
3455 HValue* length_field,
3456 FillMode fill_mode) {
3457 return AllocateArray(capacity,
3458 capacity_upper_bound->GetInteger32Constant(),
3459 length_field,
3460 fill_mode);
3461}
3462
3463
3464HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
3465 HValue* capacity,
3466 int capacity_upper_bound,
3467 HValue* length_field,
3468 FillMode fill_mode) {
3469 HConstant* elememts_size_upper_bound = capacity->IsInteger32Constant()
3470 ? HConstant::cast(capacity)
3471 : builder()->EstablishElementsAllocationSize(kind_, capacity_upper_bound);
3472
3473 HAllocate* array = AllocateArray(capacity, length_field, fill_mode);
3474 if (!elements_location_->has_size_upper_bound()) {
3475 elements_location_->set_size_upper_bound(elememts_size_upper_bound);
3476 }
3477 return array;
3478}
3479
3480
// Core allocation path: forces capacity/length to Smi, bounds-checks the
// elements size, allocates the JSArray, fills in its header, then allocates
// and installs the backing store (optionally hole-filled). Sets
// elements_location_ as a side effect and returns the array.
HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray(
    HValue* capacity,
    HValue* length_field,
    FillMode fill_mode) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  capacity =
      builder()->AddUncasted<HForceRepresentation>(capacity,
                                                   Representation::Smi());
  length_field =
      builder()->AddUncasted<HForceRepresentation>(length_field,
                                                   Representation::Smi());

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size =
      builder()->BuildCalculateElementsSize(kind_, capacity);

  // Bail out for large objects.
  HValue* max_regular_heap_object_size =
      builder()->Add<HConstant>(Page::kMaxRegularHeapObjectSize);
  builder()->Add<HBoundsCheck>(elements_size, max_regular_heap_object_size);

  // Allocate (dealing with failure appropriately)
  HAllocate* array_object = builder()->AllocateJSArrayObject(mode_);

  // Fill in the fields: map, properties, length
  HValue* map;
  if (allocation_site_payload_ == NULL) {
    map = EmitInternalMapCode();
  } else {
    map = EmitMapCode();
  }

  builder()->BuildJSArrayHeader(array_object,
                                map,
                                NULL,  // set elements to empty fixed array
                                mode_,
                                kind_,
                                allocation_site_payload_,
                                length_field);

  // Allocate and initialize the elements
  elements_location_ = builder()->BuildAllocateElements(kind_, elements_size);

  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);

  // Set the elements
  builder()->Add<HStoreNamedField>(
      array_object, HObjectAccess::ForElementsPointer(), elements_location_);

  if (fill_mode == FILL_WITH_HOLE) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  }

  return array_object;
}
3540
3541
3542HValue* HGraphBuilder::AddLoadJSBuiltin(int context_index) {
3543 HValue* native_context = BuildGetNativeContext();
3544 HObjectAccess function_access = HObjectAccess::ForContextSlot(context_index);
3545 return Add<HLoadNamedField>(native_context, nullptr, function_access);
3546}
3547
3548
// Sets up the AST-driven graph builder for |info|: the initial function
// state, the OSR helper, and (when position tracking is on) the starting
// source position.
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
    : HGraphBuilder(info),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN, 0),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      osr_(new(info->zone()) HOsrBuilder(this)) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_ = &initial_function_state_;
  InitializeAstVisitor(info->isolate());
  if (top_info()->is_tracking_positions()) {
    SetSourcePosition(info->shared_info()->start_position());
  }
}
3567
3568
3569HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
3570 HBasicBlock* second,
3571 BailoutId join_id) {
3572 if (first == NULL) {
3573 return second;
3574 } else if (second == NULL) {
3575 return first;
3576 } else {
3577 HBasicBlock* join_block = graph()->CreateBasicBlock();
3578 Goto(first, join_block);
3579 Goto(second, join_block);
3580 join_block->SetJoinId(join_id);
3581 return join_block;
3582 }
3583}
3584
3585
3586HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
3587 HBasicBlock* exit_block,
3588 HBasicBlock* continue_block) {
3589 if (continue_block != NULL) {
3590 if (exit_block != NULL) Goto(exit_block, continue_block);
3591 continue_block->SetJoinId(statement->ContinueId());
3592 return continue_block;
3593 }
3594 return exit_block;
3595}
3596
3597
3598HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3599 HBasicBlock* loop_entry,
3600 HBasicBlock* body_exit,
3601 HBasicBlock* loop_successor,
3602 HBasicBlock* break_block) {
3603 if (body_exit != NULL) Goto(body_exit, loop_entry);
3604 loop_entry->PostProcessLoopHeader(statement);
3605 if (break_block != NULL) {
3606 if (loop_successor != NULL) Goto(loop_successor, break_block);
3607 break_block->SetJoinId(statement->ExitId());
3608 return break_block;
3609 }
3610 return loop_successor;
3611}
3612
3613
3614// Build a new loop header block and set it as the current block.
3615HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3616 HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3617 Goto(loop_entry);
3618 set_current_block(loop_entry);
3619 return loop_entry;
3620}
3621
3622
3623HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
3624 IterationStatement* statement) {
3625 HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
3626 ? osr()->BuildOsrLoopEntry(statement)
3627 : BuildLoopEntry();
3628 return loop_entry;
3629}
3630
3631
// Terminates this block with |instruction| (recording |position|) and
// clears the environment, since nothing can execute after an exit.
void HBasicBlock::FinishExit(HControlInstruction* instruction,
                             SourcePosition position) {
  Finish(instruction, position);
  ClearEnvironment();
}
3637
3638
3639std::ostream& operator<<(std::ostream& os, const HBasicBlock& b) {
3640 return os << "B" << b.block_id();
3641}
3642
3643
// Constructs an empty Hydrogen graph for |info|: sets up the start
// environment (stub-descriptor-based or scope-based) and the entry block.
HGraph::HGraph(CompilationInfo* info)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      zone_(info->zone()),
      is_recursive_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false) {
  if (info->IsStub()) {
    // Code stubs take their environment layout from the call descriptor.
    CallInterfaceDescriptor descriptor =
        info->code_stub()->GetCallInterfaceDescriptor();
    start_environment_ =
        new (zone_) HEnvironment(zone_, descriptor.GetRegisterParameterCount());
  } else {
    // Regular functions: optionally record the outermost function for
    // source-position tracking, then build the environment from its scope.
    if (info->is_tracking_positions()) {
      info->TraceInlinedFunction(info->shared_info(), SourcePosition::Unknown(),
                                 InlinedFunctionInfo::kNoParentId);
    }
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionContext());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}
3679
3680
3681HBasicBlock* HGraph::CreateBasicBlock() {
3682 HBasicBlock* result = new(zone()) HBasicBlock(this);
3683 blocks_.Add(result, zone());
3684 return result;
3685}
3686
3687
3688void HGraph::FinalizeUniqueness() {
3689 DisallowHeapAllocation no_gc;
3690 for (int i = 0; i < blocks()->length(); ++i) {
3691 for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3692 it.Current()->FinalizeUniqueness();
3693 }
3694 }
3695}
3696
3697
3698int HGraph::SourcePositionToScriptPosition(SourcePosition pos) {
3699 return (FLAG_hydrogen_track_positions && !pos.IsUnknown())
3700 ? info()->start_position_for(pos.inlining_id()) + pos.position()
3701 : pos.raw();
3702}
3703
3704
3705// Block ordering was implemented with two mutually recursive methods,
3706// HGraph::Postorder and HGraph::PostorderLoopBlocks.
3707// The recursion could lead to stack overflow so the algorithm has been
3708// implemented iteratively.
3709// At a high level the algorithm looks like this:
3710//
3711// Postorder(block, loop_header) : {
3712// if (block has already been visited or is of another loop) return;
3713// mark block as visited;
3714// if (block is a loop header) {
3715// VisitLoopMembers(block, loop_header);
3716// VisitSuccessorsOfLoopHeader(block);
3717// } else {
3718// VisitSuccessors(block)
3719// }
3720// put block in result list;
3721// }
3722//
3723// VisitLoopMembers(block, outer_loop_header) {
3724// foreach (block b in block loop members) {
3725// VisitSuccessorsOfLoopMember(b, outer_loop_header);
3726// if (b is loop header) VisitLoopMembers(b);
3727// }
3728// }
3729//
3730// VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3731// foreach (block b in block successors) Postorder(b, outer_loop_header)
3732// }
3733//
3734// VisitSuccessorsOfLoopHeader(block) {
3735// foreach (block b in block successors) Postorder(b, block)
3736// }
3737//
3738// VisitSuccessors(block, loop_header) {
3739// foreach (block b in block successors) Postorder(b, loop_header)
3740// }
3741//
// The ordering is started by calling Postorder(entry, NULL).
3743//
3744// Each instance of PostorderProcessor represents the "stack frame" of the
3745// recursion, and particularly keeps the state of the loop (iteration) of the
3746// "Visit..." function it represents.
3747// To recycle memory we keep all the frames in a double linked list but
3748// this means that we cannot use constructors to initialize the frames.
3749//
// A "stack frame" of the iterative postorder traversal described above.
// Frames form a doubly linked list (father_/child_) that is reused across
// pushes instead of being reallocated, which is why initialization happens
// via the Setup* methods rather than constructors.
class PostorderProcessor : public ZoneObject {
 public:
  // Back link (towards the stack bottom).
  PostorderProcessor* parent() {return father_; }
  // Forward link (towards the stack top).
  PostorderProcessor* child() {return child_; }
  HBasicBlock* block() { return block_; }
  HLoopInformation* loop() { return loop_; }
  HBasicBlock* loop_header() { return loop_header_; }

  // Creates the bottom-most frame for the graph entry block, equivalent
  // to the initial call Postorder(entry, NULL).
  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
                                                  HBasicBlock* block) {
    PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
    return result->SetupSuccessors(zone, block, NULL);
  }

  // Performs one step of the traversal: either descends into a new frame
  // or, when this frame's cycle is exhausted, backtracks towards the
  // stack bottom. Returns the frame to continue with, or NULL when the
  // whole traversal is complete.
  PostorderProcessor* PerformStep(Zone* zone,
                                  ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* next =
        PerformNonBacktrackingStep(zone, order);
    if (next != NULL) {
      return next;
    } else {
      return Backtrack(zone, order);
    }
  }

 private:
  explicit PostorderProcessor(PostorderProcessor* father)
      : father_(father), child_(NULL), successor_iterator(NULL) { }

  // Each enum value states the cycle whose state is kept by this instance.
  enum LoopKind {
    NONE,
    SUCCESSORS,
    SUCCESSORS_OF_LOOP_HEADER,
    LOOP_MEMBERS,
    SUCCESSORS_OF_LOOP_MEMBER
  };

  // Each "Setup..." method is like a constructor for a cycle state.

  // Begins visiting |block| (the body of Postorder() above). Blocks that
  // were already visited, or that belong to a different loop than
  // |loop_header|, are skipped by marking this frame NONE.
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* block,
                                      HBasicBlock* loop_header) {
    if (block == NULL || block->IsOrdered() ||
        block->parent_loop_header() != loop_header) {
      kind_ = NONE;
      block_ = NULL;
      loop_ = NULL;
      loop_header_ = NULL;
      return this;
    } else {
      block_ = block;
      loop_ = NULL;
      block->MarkAsOrdered();

      if (block->IsLoopHeader()) {
        // Loop headers first visit all loop members, then their own
        // successors; the member visit runs in a freshly pushed frame.
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        PostorderProcessor* result = Push(zone);
        return result->SetupLoopMembers(zone, block, block->loop_information(),
                                        loop_header);
      } else {
        DCHECK(block->IsFinished());
        kind_ = SUCCESSORS;
        loop_header_ = loop_header;
        InitializeSuccessors();
        return this;
      }
    }
  }

  // Begins the VisitLoopMembers() cycle for the loop headed by |block|.
  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HBasicBlock* block,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeLoopMembers();
    return this;
  }

  // Begins the VisitSuccessorsOfLoopMember() cycle for |block|.
  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HBasicBlock* block,
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeSuccessors();
    return this;
  }

  // This method "allocates" a new stack frame, reusing a previously
  // created child frame when one exists.
  PostorderProcessor* Push(Zone* zone) {
    if (child_ == NULL) {
      child_ = new(zone) PostorderProcessor(this);
    }
    return child_;
  }

  // Emits block_ to the final order; all of its non-loop-header
  // successors must already have been emitted.
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    DCHECK(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    DCHECK(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }

  // Pops one frame off the stack: finishes this frame's cycle (emitting
  // the block where the postorder visit completes) and returns the parent
  // frame -- except that a finished SUCCESSORS_OF_LOOP_MEMBER cycle for an
  // inner loop header morphs this frame into a LOOP_MEMBERS cycle instead.
  PostorderProcessor* Pop(Zone* zone,
                          ZoneList<HBasicBlock*>* order) {
    switch (kind_) {
      case SUCCESSORS:
      case SUCCESSORS_OF_LOOP_HEADER:
        ClosePostorder(order, zone);
        return father_;
      case LOOP_MEMBERS:
        return father_;
      case SUCCESSORS_OF_LOOP_MEMBER:
        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
          // In this case we need to perform a LOOP_MEMBERS cycle so we
          // initialize it and return this instead of father.
          return SetupLoopMembers(zone, block(),
                                  block()->loop_information(), loop_header_);
        } else {
          return father_;
        }
      case NONE:
        return father_;
    }
    UNREACHABLE();
    return NULL;
  }

  // Walks up the stack.
  PostorderProcessor* Backtrack(Zone* zone,
                                ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* parent = Pop(zone, order);
    while (parent != NULL) {
      PostorderProcessor* next =
          parent->PerformNonBacktrackingStep(zone, order);
      if (next != NULL) {
        return next;
      } else {
        parent = parent->Pop(zone, order);
      }
    }
    return NULL;
  }

  // Advances this frame's cycle by one iteration, pushing a new frame for
  // the next block to visit. Returns NULL when the cycle is exhausted and
  // the caller should backtrack.
  PostorderProcessor* PerformNonBacktrackingStep(
      Zone* zone,
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
    switch (kind_) {
      case SUCCESSORS:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_HEADER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          // Successors of a loop header belong to the header's own loop.
          return result->SetupSuccessors(zone, next_block, block());
        }
        break;
      case LOOP_MEMBERS:
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case NONE:
        return NULL;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in successors" cycle.
  void InitializeSuccessors() {
    loop_index = 0;
    loop_length = 0;
    successor_iterator = HSuccessorIterator(block_->end());
  }

  HBasicBlock* AdvanceSuccessors() {
    if (!successor_iterator.Done()) {
      HBasicBlock* result = successor_iterator.Current();
      successor_iterator.Advance();
      return result;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in loop members" cycle.
  void InitializeLoopMembers() {
    loop_index = 0;
    loop_length = loop_->blocks()->length();
  }

  HBasicBlock* AdvanceLoopMembers() {
    if (loop_index < loop_length) {
      HBasicBlock* result = loop_->blocks()->at(loop_index);
      loop_index++;
      return result;
    } else {
      return NULL;
    }
  }

  LoopKind kind_;                         // Which cycle this frame runs.
  PostorderProcessor* father_;            // Frame below us on the stack.
  PostorderProcessor* child_;             // Cached frame above us (reused).
  HLoopInformation* loop_;                // Loop being iterated (LOOP_* kinds).
  HBasicBlock* block_;                    // Block this frame is visiting.
  HBasicBlock* loop_header_;              // Header of the enclosing loop.
  int loop_index;                         // Cursor into loop_->blocks().
  int loop_length;                        // Length of loop_->blocks().
  HSuccessorIterator successor_iterator;  // Cursor over block_'s successors.
};
3989
3990
3991void HGraph::OrderBlocks() {
3992 CompilationPhase phase("H_Block ordering", info());
3993
3994#ifdef DEBUG
3995 // Initially the blocks must not be ordered.
3996 for (int i = 0; i < blocks_.length(); ++i) {
3997 DCHECK(!blocks_[i]->IsOrdered());
3998 }
3999#endif
4000
4001 PostorderProcessor* postorder =
4002 PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
4003 blocks_.Rewind(0);
4004 while (postorder) {
4005 postorder = postorder->PerformStep(zone(), &blocks_);
4006 }
4007
4008#ifdef DEBUG
4009 // Now all blocks must be marked as ordered.
4010 for (int i = 0; i < blocks_.length(); ++i) {
4011 DCHECK(blocks_[i]->IsOrdered());
4012 }
4013#endif
4014
4015 // Reverse block list and assign block IDs.
4016 for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
4017 HBasicBlock* bi = blocks_[i];
4018 HBasicBlock* bj = blocks_[j];
4019 bi->set_block_id(j);
4020 bj->set_block_id(i);
4021 blocks_[i] = bj;
4022 blocks_[j] = bi;
4023 }
4024}
4025
4026
4027void HGraph::AssignDominators() {
4028 HPhase phase("H_Assign dominators", this);
4029 for (int i = 0; i < blocks_.length(); ++i) {
4030 HBasicBlock* block = blocks_[i];
4031 if (block->IsLoopHeader()) {
4032 // Only the first predecessor of a loop header is from outside the loop.
4033 // All others are back edges, and thus cannot dominate the loop header.
4034 block->AssignCommonDominator(block->predecessors()->first());
4035 block->AssignLoopSuccessorDominators();
4036 } else {
4037 for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
4038 blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
4039 }
4040 }
4041 }
4042}
4043
4044
4045bool HGraph::CheckArgumentsPhiUses() {
4046 int block_count = blocks_.length();
4047 for (int i = 0; i < block_count; ++i) {
4048 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
4049 HPhi* phi = blocks_[i]->phis()->at(j);
4050 // We don't support phi uses of arguments for now.
4051 if (phi->CheckFlag(HValue::kIsArguments)) return false;
4052 }
4053 }
4054 return true;
4055}
4056
4057
4058bool HGraph::CheckConstPhiUses() {
4059 int block_count = blocks_.length();
4060 for (int i = 0; i < block_count; ++i) {
4061 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
4062 HPhi* phi = blocks_[i]->phis()->at(j);
4063 // Check for the hole value (from an uninitialized const).
4064 for (int k = 0; k < phi->OperandCount(); k++) {
4065 if (phi->OperandAt(k) == GetConstantHole()) return false;
4066 }
4067 }
4068 }
4069 return true;
4070}
4071
4072
4073void HGraph::CollectPhis() {
4074 int block_count = blocks_.length();
4075 phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
4076 for (int i = 0; i < block_count; ++i) {
4077 for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
4078 HPhi* phi = blocks_[i]->phis()->at(j);
4079 phi_list_->Add(phi, zone());
4080 }
4081 }
4082}
4083
4084
// Implementation of utility class to encapsulate the translation state for
// a (possibly inlined) function.
//
// Constructing a FunctionState pushes it onto the builder's state stack;
// the destructor pops it. For inlined functions it also sets up the
// return target blocks (or a TestContext when inlined inside a test).
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info, InliningKind inlining_kind,
                             int inlining_id)
    : owner_(owner),
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      function_return_(NULL),
      test_context_(NULL),
      entry_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      inlining_id_(inlining_id),
      outer_source_position_(SourcePosition::Unknown()),
      outer_(owner->function_state()) {
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      // Inlined inside a test: route returns through a fresh TestContext
      // whose targets are inline-return blocks.
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack. This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
      test_context_ = new TestContext(owner, cond, if_true, if_false);
    } else {
      // Otherwise all returns funnel into a single return block.
      function_return_ = owner->graph()->CreateBasicBlock();
      function_return()->MarkAsInlineReturnTarget(owner->current_block());
    }
    // Set this after possibly allocating a new TestContext above.
    call_context_ = owner->ast_context();
  }

  // Push on the state stack.
  owner->set_function_state(this);

  if (compilation_info_->is_tracking_positions()) {
    // Remember the caller's position so the destructor can restore it.
    outer_source_position_ = owner->source_position();
    owner->EnterInlinedSource(
        info->shared_info()->start_position(),
        inlining_id);
    owner->SetSourcePosition(info->shared_info()->start_position());
  }
}
4133
4134
// Pops this state off the builder's state stack and restores the caller's
// source position. test_context_ is heap-allocated in the constructor and
// owned here.
FunctionState::~FunctionState() {
  delete test_context_;
  owner_->set_function_state(outer_);

  if (compilation_info_->is_tracking_positions()) {
    owner_->set_source_position(outer_source_position_);
    owner_->EnterInlinedSource(
        outer_->compilation_info()->shared_info()->start_position(),
        outer_->inlining_id());
  }
}
4146
4147
// Implementation of utility classes to represent an expression's context in
// the AST.
//
// Constructing an AstContext pushes it onto the builder's context stack;
// the destructor pops it. In debug builds the environment length is
// recorded so the subclass destructors can verify stack discipline.
AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
    : owner_(owner),
      kind_(kind),
      outer_(owner->ast_context()),
      typeof_mode_(NOT_INSIDE_TYPEOF) {
  owner->set_ast_context(this);  // Push.
#ifdef DEBUG
  DCHECK(owner->environment()->frame_type() == JS_FUNCTION);
  original_length_ = owner->environment()->length();
#endif
}
4161
4162
// Pops this context off the builder's context stack.
AstContext::~AstContext() {
  owner_->set_ast_context(outer_);  // Pop.
}
4166
4167
// An effect context must leave the environment length unchanged (values
// are discarded), unless we bailed out or control flow ended.
EffectContext::~EffectContext() {
  DCHECK(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}
4174
4175
// A value context must grow the environment by exactly one pushed value,
// unless we bailed out or control flow ended.
ValueContext::~ValueContext() {
  DCHECK(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}
4182
4183
// In an effect context the expression's value is simply ignored.
void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}
4187
4188
4189void ValueContext::ReturnValue(HValue* value) {
4190 // The value is tracked in the bailout environment, and communicated
4191 // through the environment as the result of the expression.
4192 if (value->CheckFlag(HValue::kIsArguments)) {
4193 if (flag_ == ARGUMENTS_FAKED) {
4194 value = owner()->graph()->GetConstantUndefined();
4195 } else if (!arguments_allowed()) {
4196 owner()->Bailout(kBadValueContextForArgumentsValue);
4197 }
4198 }
4199 owner()->Push(value);
4200}
4201
4202
// In a test context the value is immediately turned into a branch.
void TestContext::ReturnValue(HValue* value) {
  BuildBranch(value);
}
4206
4207
4208void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4209 DCHECK(!instr->IsControlInstruction());
4210 owner()->AddInstruction(instr);
4211 if (instr->HasObservableSideEffects()) {
4212 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4213 }
4214}
4215
4216
4217void EffectContext::ReturnControl(HControlInstruction* instr,
4218 BailoutId ast_id) {
4219 DCHECK(!instr->HasObservableSideEffects());
4220 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4221 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4222 instr->SetSuccessorAt(0, empty_true);
4223 instr->SetSuccessorAt(1, empty_false);
4224 owner()->FinishCurrentBlock(instr);
4225 HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
4226 owner()->set_current_block(join);
4227}
4228
4229
4230void EffectContext::ReturnContinuation(HIfContinuation* continuation,
4231 BailoutId ast_id) {
4232 HBasicBlock* true_branch = NULL;
4233 HBasicBlock* false_branch = NULL;
4234 continuation->Continue(&true_branch, &false_branch);
4235 if (!continuation->IsTrueReachable()) {
4236 owner()->set_current_block(false_branch);
4237 } else if (!continuation->IsFalseReachable()) {
4238 owner()->set_current_block(true_branch);
4239 } else {
4240 HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
4241 owner()->set_current_block(join);
4242 }
4243}
4244
4245
4246void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4247 DCHECK(!instr->IsControlInstruction());
4248 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4249 return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4250 }
4251 owner()->AddInstruction(instr);
4252 owner()->Push(instr);
4253 if (instr->HasObservableSideEffects()) {
4254 owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4255 }
4256}
4257
4258
4259void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
4260 DCHECK(!instr->HasObservableSideEffects());
4261 if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
4262 return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
4263 }
4264 HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
4265 HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
4266 instr->SetSuccessorAt(0, materialize_true);
4267 instr->SetSuccessorAt(1, materialize_false);
4268 owner()->FinishCurrentBlock(instr);
4269 owner()->set_current_block(materialize_true);
4270 owner()->Push(owner()->graph()->GetConstantTrue());
4271 owner()->set_current_block(materialize_false);
4272 owner()->Push(owner()->graph()->GetConstantFalse());
4273 HBasicBlock* join =
4274 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4275 owner()->set_current_block(join);
4276}
4277
4278
4279void ValueContext::ReturnContinuation(HIfContinuation* continuation,
4280 BailoutId ast_id) {
4281 HBasicBlock* materialize_true = NULL;
4282 HBasicBlock* materialize_false = NULL;
4283 continuation->Continue(&materialize_true, &materialize_false);
4284 if (continuation->IsTrueReachable()) {
4285 owner()->set_current_block(materialize_true);
4286 owner()->Push(owner()->graph()->GetConstantTrue());
4287 owner()->set_current_block(materialize_true);
4288 }
4289 if (continuation->IsFalseReachable()) {
4290 owner()->set_current_block(materialize_false);
4291 owner()->Push(owner()->graph()->GetConstantFalse());
4292 owner()->set_current_block(materialize_false);
4293 }
4294 if (continuation->TrueAndFalseReachable()) {
4295 HBasicBlock* join =
4296 owner()->CreateJoin(materialize_true, materialize_false, ast_id);
4297 owner()->set_current_block(join);
4298 }
4299}
4300
4301
4302void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
4303 DCHECK(!instr->IsControlInstruction());
4304 HOptimizedGraphBuilder* builder = owner();
4305 builder->AddInstruction(instr);
4306 // We expect a simulate after every expression with side effects, though
4307 // this one isn't actually needed (and wouldn't work if it were targeted).
4308 if (instr->HasObservableSideEffects()) {
4309 builder->Push(instr);
4310 builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
4311 builder->Pop();
4312 }
4313 BuildBranch(instr);
4314}
4315
4316
4317void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
4318 DCHECK(!instr->HasObservableSideEffects());
4319 HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
4320 HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
4321 instr->SetSuccessorAt(0, empty_true);
4322 instr->SetSuccessorAt(1, empty_false);
4323 owner()->FinishCurrentBlock(instr);
4324 owner()->Goto(empty_true, if_true(), owner()->function_state());
4325 owner()->Goto(empty_false, if_false(), owner()->function_state());
4326 owner()->set_current_block(NULL);
4327}
4328
4329
4330void TestContext::ReturnContinuation(HIfContinuation* continuation,
4331 BailoutId ast_id) {
4332 HBasicBlock* true_branch = NULL;
4333 HBasicBlock* false_branch = NULL;
4334 continuation->Continue(&true_branch, &false_branch);
4335 if (continuation->IsTrueReachable()) {
4336 owner()->Goto(true_branch, if_true(), owner()->function_state());
4337 }
4338 if (continuation->IsFalseReachable()) {
4339 owner()->Goto(false_branch, if_false(), owner()->function_state());
4340 }
4341 owner()->set_current_block(NULL);
4342}
4343
4344
4345void TestContext::BuildBranch(HValue* value) {
4346 // We expect the graph to be in edge-split form: there is no edge that
4347 // connects a branch node to a join node. We conservatively ensure that
4348 // property by always adding an empty block on the outgoing edges of this
4349 // branch.
4350 HOptimizedGraphBuilder* builder = owner();
4351 if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
4352 builder->Bailout(kArgumentsObjectValueInATestContext);
4353 }
4354 ToBooleanStub::Types expected(condition()->to_boolean_types());
4355 ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
4356}
4357
4358
// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.

// Evaluate |call| and return early if it triggered a bailout (signalled
// through the stack-overflow flag).
#define CHECK_BAILOUT(call) \
  do { \
    call; \
    if (HasStackOverflow()) return; \
  } while (false)


// Like CHECK_BAILOUT, but also return early when control flow ended
// (current block became NULL), e.g. after an unconditional branch.
#define CHECK_ALIVE(call) \
  do { \
    call; \
    if (HasStackOverflow() || current_block() == NULL) return; \
  } while (false)


// Like CHECK_ALIVE, but for functions with a non-void return: |value| is
// returned on bailout or dead control flow.
#define CHECK_ALIVE_OR_RETURN(call, value) \
  do { \
    call; \
    if (HasStackOverflow() || current_block() == NULL) return value; \
  } while (false)
4379
4380
// Aborts optimization with |reason| and sets the stack-overflow flag,
// which the CHECK_* macros above use to unwind the AST visit.
void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
  current_info()->AbortOptimization(reason);
  SetStackOverflow();
}
4385
4386
// Visits |expr| for its side effects only; the value is discarded.
void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
  EffectContext for_effect(this);
  Visit(expr);
}
4391
4392
// Visits |expr| for its value, which ends up pushed on the environment.
// |flag| controls whether an arguments-object value is permitted.
void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
                                           ArgumentsAllowedFlag flag) {
  ValueContext for_value(this, flag);
  Visit(expr);
}
4398
4399
// Visits |expr| as the operand of 'typeof'; the INSIDE_TYPEOF mode changes
// how unresolved variable references are handled.
void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
  for_value.set_typeof_mode(INSIDE_TYPEOF);
  Visit(expr);
}
4405
4406
// Visits |expr| as a condition: control continues in |true_block| or
// |false_block| depending on the expression's boolean value.
void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
                                             HBasicBlock* true_block,
                                             HBasicBlock* false_block) {
  TestContext for_control(this, expr, true_block, false_block);
  Visit(expr);
}
4413
4414
// Visits each expression for its value, stopping early on bailout or
// dead control flow; values are left pushed on the environment in order.
void HOptimizedGraphBuilder::VisitExpressions(
    ZoneList<Expression*>* exprs) {
  for (int i = 0; i < exprs->length(); ++i) {
    CHECK_ALIVE(VisitForValue(exprs->at(i)));
  }
}
4421
4422
// As above, but with explicit control over whether arguments-object
// values are allowed in the visited expressions.
void HOptimizedGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs,
                                              ArgumentsAllowedFlag flag) {
  for (int i = 0; i < exprs->length(); ++i) {
    CHECK_ALIVE(VisitForValue(exprs->at(i), flag));
  }
}
4429
4430
// Translates the function's AST into the Hydrogen graph. Returns false if
// construction bailed out (reason recorded on the CompilationInfo).
bool HOptimizedGraphBuilder::BuildGraph() {
  // Subclass constructors (with super references) are not supported.
  if (IsSubclassConstructor(current_info()->literal()->kind())) {
    Bailout(kSuperReference);
    return false;
  }

  Scope* scope = current_info()->scope();
  SetUpScope(scope);

  // Add an edge to the body entry. This is warty: the graph's start
  // environment will be used by the Lithium translation as the initial
  // environment on graph entry, but it has now been mutated by the
  // Hydrogen translation of the instructions in the start block. This
  // environment uses values which have not been defined yet. These
  // Hydrogen instructions will then be replayed by the Lithium
  // translation, so they cannot have an environment effect. The edge to
  // the body's entry block (along with some special logic for the start
  // block in HInstruction::InsertAfter) seals the start block from
  // getting unwanted instructions inserted.
  //
  // TODO(kmillikin): Fix this. Stop mutating the initial environment.
  // Make the Hydrogen instructions in the initial block into Hydrogen
  // values (but not instructions), present in the initial environment and
  // not replayed by the Lithium translation.
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  Goto(body_entry);
  body_entry->SetJoinId(BailoutId::FunctionEntry());
  set_current_block(body_entry);

  VisitDeclarations(scope->declarations());
  Add<HSimulate>(BailoutId::Declarations());

  Add<HStackCheck>(HStackCheck::kFunctionEntry);

  VisitStatements(current_info()->literal()->body());
  if (HasStackOverflow()) return false;

  // Functions that fall off the end implicitly return undefined.
  if (current_block() != NULL) {
    Add<HReturn>(graph()->GetConstantUndefined());
    set_current_block(NULL);
  }

  // If the checksum of the number of type info changes is the same as the
  // last time this function was compiled, then this recompile is likely not
  // due to missing/inadequate type feedback, but rather too aggressive
  // optimization. Disable optimistic LICM in that case.
  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  int checksum = type_info->own_type_change_checksum();
  int composite_checksum = graph()->update_type_change_checksum(checksum);
  graph()->set_use_optimistic_licm(
      !type_info->matches_inlined_type_change_checksum(composite_checksum));
  type_info->set_inlined_type_change_checksum(composite_checksum);

  // Perform any necessary OSR-specific cleanups or changes to the graph.
  osr()->FinishGraph();

  return true;
}
4493
4494
// Runs the full Hydrogen optimization pipeline over the graph. The phase
// order below is semantically significant; see the comments on individual
// phases. Returns false (with |bailout_reason| set) if the graph cannot
// be optimized.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Find and mark unreachable code to simplify optimizations, especially gvn,
  // where unreachable code could unnecessarily defeat LICM.
  Run<HMarkUnreachableBlocksPhase>();

  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();

  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HMarkDeoptimizeOnUndefinedPhase>();
  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();

  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();

  Run<HRangeAnalysisPhase>();

  Run<HComputeChangeUndefinedToNaN>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
  if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  // Find unreachable code a second time, GVN and other optimizations may have
  // made blocks unreachable that were previously reachable.
  Run<HMarkUnreachableBlocksPhase>();

  return true;
}
4582
4583
// Final graph cleanup pass: remove or bypass "informative definitions"
// (redefinitions inserted by earlier phases) so that every remaining use
// refers to the instruction's actual value.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis are never redefined, so each phi must be its own actual value.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      DCHECK(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      // Fast path: instruction has not been redefined; nothing to restore.
      if (instruction->ActualValue() == instruction) continue;
      if (instruction->CheckFlag(HValue::kIsDead)) {
        // The instruction was marked as deleted but left in the graph
        // as a control flow dependency point for subsequent
        // instructions.
        instruction->DeleteAndReplaceWith(instruction->ActualValue());
      } else {
        DCHECK(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          // Carries no runtime effect: delete it and redirect uses to the
          // operand it redefined.
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          // Has a runtime effect (e.g. a check): keep the instruction but
          // point all uses at the underlying value.
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}
4616
4617
4618void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4619 ZoneList<HValue*> arguments(count, zone());
4620 for (int i = 0; i < count; ++i) {
4621 arguments.Add(Pop(), zone());
4622 }
4623
4624 HPushArguments* push_args = New<HPushArguments>();
4625 while (!arguments.is_empty()) {
4626 push_args->AddInput(arguments.RemoveLast());
4627 }
4628 AddInstruction(push_args);
4629}
4630
4631
4632template <class Instruction>
4633HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
4634 PushArgumentsFromEnvironment(call->argument_count());
4635 return call;
4636}
4637
4638
// Sets up the function's initial environment: binds parameters, initializes
// locals to undefined, emits the prologue, and creates the arguments object.
// Bails out of optimization for features Crankshaft does not support here
// (rest parameters, this-function/new.target variables).
void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
  HEnvironment* prolog_env = environment();
  int parameter_count = environment()->parameter_count();
  ZoneList<HValue*> parameters(parameter_count, zone());
  // Bind each incoming parameter (index 0 is the receiver) and remember the
  // HParameter values for building the arguments object below.
  for (int i = 0; i < parameter_count; ++i) {
    HInstruction* parameter = Add<HParameter>(static_cast<unsigned>(i));
    parameters.Add(parameter, zone());
    environment()->Bind(i, parameter);
  }

  HConstant* undefined_constant = graph()->GetConstantUndefined();
  // Initialize specials and locals to undefined.
  for (int i = parameter_count + 1; i < environment()->length(); ++i) {
    environment()->Bind(i, undefined_constant);
  }
  Add<HPrologue>();

  // Split off the function body into its own block so the prologue block
  // stays separate; no simulate is needed on this edge.
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  GotoNoSimulate(body_entry);
  set_current_block(body_entry);

  // Initialize context of prolog environment to undefined.
  prolog_env->BindContext(undefined_constant);

  // First special is HContext.
  HInstruction* context = Add<HContext>();
  environment()->BindContext(context);

  // Create an arguments object containing the initial parameters. Set the
  // initial values of parameters including "this" having parameter index 0.
  DCHECK_EQ(scope->num_parameters() + 1, parameter_count);
  HArgumentsObject* arguments_object = New<HArgumentsObject>(parameter_count);
  for (int i = 0; i < parameter_count; ++i) {
    HValue* parameter = parameters.at(i);
    arguments_object->AddArgument(parameter, zone());
  }

  AddInstruction(arguments_object);
  graph()->SetArgumentsObject(arguments_object);

  // Handle the arguments and arguments shadow variables specially (they do
  // not have declarations).
  if (scope->arguments() != NULL) {
    environment()->Bind(scope->arguments(), graph()->GetArgumentsObject());
  }

  // Rest parameters are not supported by this optimizing path.
  int rest_index;
  Variable* rest = scope->rest_parameter(&rest_index);
  if (rest) {
    return Bailout(kRestParameter);
  }

  // Neither are functions that capture this-function or new.target.
  if (scope->this_function_var() != nullptr ||
      scope->new_target_var() != nullptr) {
    return Bailout(kSuperReference);
  }

  // Trace the call.
  if (FLAG_trace && top_info()->IsOptimizing()) {
    Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kTraceEnter), 0);
  }
}
4702
4703
4704void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
4705 for (int i = 0; i < statements->length(); i++) {
4706 Statement* stmt = statements->at(i);
4707 CHECK_ALIVE(Visit(stmt));
4708 if (stmt->IsJump()) break;
4709 }
4710}
4711
4712
// Builds graph for a block statement. If the block's scope needs a context
// (it has context-allocated locals), a block context is allocated on entry
// and the previous context is restored on exit; labelled blocks additionally
// get a break join block.
void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  Scope* outer_scope = scope();
  Scope* scope = stmt->scope();  // NOTE: shadows the builder's scope().
  BreakAndContinueInfo break_info(stmt, outer_scope);

  { BreakAndContinueScope push(&break_info, this);
    if (scope != NULL) {
      if (scope->NeedsContext()) {
        // Load the function object.
        Scope* declaration_scope = scope->DeclarationScope();
        HInstruction* function;
        HValue* outer_context = environment()->context();
        if (declaration_scope->is_script_scope() ||
            declaration_scope->is_eval_scope()) {
          // For script/eval code the closure lives in the context rather
          // than in the frame.
          function = new (zone())
              HLoadContextSlot(outer_context, Context::CLOSURE_INDEX,
                               HLoadContextSlot::kNoCheck);
        } else {
          function = New<HThisFunction>();
        }
        AddInstruction(function);
        // Allocate a block context and store it to the stack frame.
        HInstruction* inner_context = Add<HAllocateBlockContext>(
            outer_context, function, scope->GetScopeInfo(isolate()));
        HInstruction* instr = Add<HStoreFrameContext>(inner_context);
        set_scope(scope);
        environment()->BindContext(inner_context);
        if (instr->HasObservableSideEffects()) {
          AddSimulate(stmt->EntryId(), REMOVABLE_SIMULATE);
        }
      }
      VisitDeclarations(scope->declarations());
      AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
    }
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  // Leaving the block: restore the outer scope and, if a context was
  // allocated, pop back to the previous context in the frame.
  set_scope(outer_scope);
  if (scope != NULL && current_block() != NULL &&
      scope->ContextLocalCount() > 0) {
    HValue* inner_context = environment()->context();
    HValue* outer_context = Add<HLoadNamedField>(
        inner_context, nullptr,
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));

    HInstruction* instr = Add<HStoreFrameContext>(outer_context);
    environment()->BindContext(outer_context);
    if (instr->HasObservableSideEffects()) {
      AddSimulate(stmt->ExitId(), REMOVABLE_SIMULATE);
    }
  }
  // If any 'break' targeted this block, join it with normal fall-through.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    if (current_block() != NULL) Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
4774
4775
// Builds graph for an expression statement: evaluate the expression for its
// side effects only, discarding the value.
void HOptimizedGraphBuilder::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  VisitForEffect(stmt->expression());
}
4783
4784
// Empty statements generate no code; only the builder invariants are checked.
void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
}
4790
4791
// A sloppy-mode block-scoped function declaration is compiled exactly like
// the statement it wraps.
void HOptimizedGraphBuilder::VisitSloppyBlockFunctionStatement(
    SloppyBlockFunctionStatement* stmt) {
  Visit(stmt->statement());
}
4796
4797
// Builds graph for an if statement. Conditions that are statically known
// true/false compile only the taken branch; otherwise both arms are built
// and joined. A NULL arm block after visiting signals that the arm never
// falls through (e.g. it returns), and is excluded from the join.
void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    // Only build an arm if control can actually reach it.
    if (cond_true->HasPredecessor()) {
      cond_true->SetJoinId(stmt->ThenId());
      set_current_block(cond_true);
      CHECK_BAILOUT(Visit(stmt->then_statement()));
      cond_true = current_block();
    } else {
      cond_true = NULL;
    }

    if (cond_false->HasPredecessor()) {
      cond_false->SetJoinId(stmt->ElseId());
      set_current_block(cond_false);
      CHECK_BAILOUT(Visit(stmt->else_statement()));
      cond_false = current_block();
    } else {
      cond_false = NULL;
    }

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}
4835
4836
4837HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4838 BreakableStatement* stmt,
4839 BreakType type,
4840 Scope** scope,
4841 int* drop_extra) {
4842 *drop_extra = 0;
4843 BreakAndContinueScope* current = this;
4844 while (current != NULL && current->info()->target() != stmt) {
4845 *drop_extra += current->info()->drop_extra();
4846 current = current->next();
4847 }
4848 DCHECK(current != NULL); // Always found (unless stack is malformed).
4849 *scope = current->info()->scope();
4850
4851 if (type == BREAK) {
4852 *drop_extra += current->info()->drop_extra();
4853 }
4854
4855 HBasicBlock* block = NULL;
4856 switch (type) {
4857 case BREAK:
4858 block = current->info()->break_block();
4859 if (block == NULL) {
4860 block = current->owner()->graph()->CreateBasicBlock();
4861 current->info()->set_break_block(block);
4862 }
4863 break;
4864
4865 case CONTINUE:
4866 block = current->info()->continue_block();
4867 if (block == NULL) {
4868 block = current->owner()->graph()->CreateBasicBlock();
4869 current->info()->set_continue_block(block);
4870 }
4871 break;
4872 }
4873
4874 return block;
4875}
4876
4877
4878void HOptimizedGraphBuilder::VisitContinueStatement(
4879 ContinueStatement* stmt) {
4880 DCHECK(!HasStackOverflow());
4881 DCHECK(current_block() != NULL);
4882 DCHECK(current_block()->HasPredecessor());
4883 Scope* outer_scope = NULL;
4884 Scope* inner_scope = scope();
4885 int drop_extra = 0;
4886 HBasicBlock* continue_block = break_scope()->Get(
4887 stmt->target(), BreakAndContinueScope::CONTINUE,
4888 &outer_scope, &drop_extra);
4889 HValue* context = environment()->context();
4890 Drop(drop_extra);
4891 int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4892 if (context_pop_count > 0) {
4893 while (context_pop_count-- > 0) {
4894 HInstruction* context_instruction = Add<HLoadNamedField>(
4895 context, nullptr,
4896 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4897 context = context_instruction;
4898 }
4899 HInstruction* instr = Add<HStoreFrameContext>(context);
4900 if (instr->HasObservableSideEffects()) {
4901 AddSimulate(stmt->target()->EntryId(), REMOVABLE_SIMULATE);
4902 }
4903 environment()->BindContext(context);
4904 }
4905
4906 Goto(continue_block);
4907 set_current_block(NULL);
4908}
4909
4910
4911void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4912 DCHECK(!HasStackOverflow());
4913 DCHECK(current_block() != NULL);
4914 DCHECK(current_block()->HasPredecessor());
4915 Scope* outer_scope = NULL;
4916 Scope* inner_scope = scope();
4917 int drop_extra = 0;
4918 HBasicBlock* break_block = break_scope()->Get(
4919 stmt->target(), BreakAndContinueScope::BREAK,
4920 &outer_scope, &drop_extra);
4921 HValue* context = environment()->context();
4922 Drop(drop_extra);
4923 int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4924 if (context_pop_count > 0) {
4925 while (context_pop_count-- > 0) {
4926 HInstruction* context_instruction = Add<HLoadNamedField>(
4927 context, nullptr,
4928 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4929 context = context_instruction;
4930 }
4931 HInstruction* instr = Add<HStoreFrameContext>(context);
4932 if (instr->HasObservableSideEffects()) {
4933 AddSimulate(stmt->target()->ExitId(), REMOVABLE_SIMULATE);
4934 }
4935 environment()->BindContext(context);
4936 }
4937 Goto(break_block);
4938 set_current_block(NULL);
4939}
4940
4941
// Builds graph for a return statement. A non-inlined return emits an actual
// HReturn; a return from inlined code instead routes the value to the
// continuation appropriate for the inlining kind (construct call, setter,
// normal) and the AST context of the call site.
void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  FunctionState* state = function_state();
  AstContext* context = call_context();
  if (context == NULL) {
    // Not an inlined return, so an actual one.
    CHECK_ALIVE(VisitForValue(stmt->expression()));
    HValue* result = environment()->Pop();
    Add<HReturn>(result);
  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    // Return from an inlined construct call. In a test context the return value
    // will always evaluate to true, in a value context the return value needs
    // to be a JSObject.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(test->if_true(), state);
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      HValue* return_value = Pop();
      // Per construct-call semantics: a non-object return value is replaced
      // by the receiver, decided by a dynamic instance-type check.
      HValue* receiver = environment()->arguments_environment()->Lookup(0);
      HHasInstanceTypeAndBranch* typecheck =
          New<HHasInstanceTypeAndBranch>(return_value,
                                         FIRST_JS_RECEIVER_TYPE,
                                         LAST_JS_RECEIVER_TYPE);
      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
      typecheck->SetSuccessorAt(0, if_spec_object);
      typecheck->SetSuccessorAt(1, not_spec_object);
      FinishCurrentBlock(typecheck);
      AddLeaveInlined(if_spec_object, return_value, state);
      AddLeaveInlined(not_spec_object, receiver, state);
    }
  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    // Return from an inlined setter call. The returned value is never used, the
    // value of an assignment is always the value of the RHS of the assignment.
    CHECK_ALIVE(VisitForEffect(stmt->expression()));
    if (context->IsTest()) {
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      context->ReturnValue(rhs);
    } else if (context->IsEffect()) {
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      AddLeaveInlined(rhs, state);
    }
  } else {
    // Return from a normal inlined function. Visit the subexpression in the
    // expression context of the call.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
    } else if (context->IsEffect()) {
      // Visit in value context and ignore the result. This is needed to keep
      // environment in sync with full-codegen since some visitors (e.g.
      // VisitCountOperation) use the operand stack differently depending on
      // context.
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      Pop();
      Goto(function_return(), state);
    } else {
      DCHECK(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      AddLeaveInlined(Pop(), state);
    }
  }
  // A return never falls through.
  set_current_block(NULL);
}
5017
5018
// 'with' statements are not supported by this optimizing compiler; abort
// optimization so execution falls back to unoptimized code.
void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kWithStatement);
}
5025
5026
// Builds graph for a switch statement in two passes: first a chain of
// strict-equality tests against the tag (leaving the clause bodies
// dangling), then the bodies in source order so that fall-through between
// clauses is modeled by joining each body's exit with the next body's entry.
void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  ZoneList<CaseClause*>* clauses = stmt->cases();
  int clause_count = clauses->length();
  ZoneList<HBasicBlock*> body_blocks(clause_count, zone());

  CHECK_ALIVE(VisitForValue(stmt->tag()));
  Add<HSimulate>(stmt->EntryId());
  HValue* tag_value = Top();  // Kept on the stack across all label tests.
  Type* tag_type = stmt->tag()->bounds().lower;

  // 1. Build all the tests, with dangling true branches
  BailoutId default_id = BailoutId::None();
  for (int i = 0; i < clause_count; ++i) {
    CaseClause* clause = clauses->at(i);
    if (clause->is_default()) {
      // The default clause has no test; its body entry is resolved in
      // pass 2 from whatever block falls off the end of the test chain.
      body_blocks.Add(NULL, zone());
      if (default_id.IsNone()) default_id = clause->EntryId();
      continue;
    }

    // Generate a compare and branch.
    CHECK_BAILOUT(VisitForValue(clause->label()));
    if (current_block() == NULL) return Bailout(kUnsupportedSwitchStatement);
    HValue* label_value = Pop();

    Type* label_type = clause->label()->bounds().lower;
    Type* combined_type = clause->compare_type();
    HControlInstruction* compare = BuildCompareInstruction(
        Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
        combined_type,
        ScriptPositionToSourcePosition(stmt->tag()->position()),
        ScriptPositionToSourcePosition(clause->label()->position()),
        PUSH_BEFORE_SIMULATE, clause->id());

    HBasicBlock* next_test_block = graph()->CreateBasicBlock();
    HBasicBlock* body_block = graph()->CreateBasicBlock();
    body_blocks.Add(body_block, zone());
    compare->SetSuccessorAt(0, body_block);
    compare->SetSuccessorAt(1, next_test_block);
    FinishCurrentBlock(compare);

    // The matched body no longer needs the tag on the stack.
    set_current_block(body_block);
    Drop(1);  // tag_value

    set_current_block(next_test_block);
  }

  // Save the current block to use for the default or to join with the
  // exit.
  HBasicBlock* last_block = current_block();
  Drop(1);  // tag_value

  // 2. Loop over the clauses and the linked list of tests in lockstep,
  // translating the clause bodies.
  HBasicBlock* fall_through_block = NULL;

  BreakAndContinueInfo break_info(stmt, scope());
  { BreakAndContinueScope push(&break_info, this);
    for (int i = 0; i < clause_count; ++i) {
      CaseClause* clause = clauses->at(i);

      // Identify the block where normal (non-fall-through) control flow
      // goes to.
      HBasicBlock* normal_block = NULL;
      if (clause->is_default()) {
        if (last_block == NULL) continue;
        normal_block = last_block;
        last_block = NULL;  // Cleared to indicate we've handled it.
      } else {
        normal_block = body_blocks[i];
      }

      if (fall_through_block == NULL) {
        set_current_block(normal_block);
      } else {
        // Merge fall-through from the previous clause with the direct
        // entry from a matching label.
        HBasicBlock* join = CreateJoin(fall_through_block,
                                       normal_block,
                                       clause->EntryId());
        set_current_block(join);
      }

      CHECK_BAILOUT(VisitStatements(clause->statements()));
      fall_through_block = current_block();
    }
  }

  // Create an up-to-3-way join. Use the break block if it exists since
  // it's already a join block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block == NULL) {
    set_current_block(CreateJoin(fall_through_block,
                                 last_block,
                                 stmt->ExitId()));
  } else {
    if (fall_through_block != NULL) Goto(fall_through_block, break_block);
    if (last_block != NULL) Goto(last_block, break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
5131
5132
// Visits a loop body, first installing a backwards-branch stack check on the
// loop header so long-running loops can be interrupted.
void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
                                           HBasicBlock* loop_entry) {
  Add<HSimulate>(stmt->StackCheckId());
  HStackCheck* stack_check =
      HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
  DCHECK(loop_entry->IsLoopHeader());
  // Remember the stack check so it can be eliminated later if the loop
  // turns out not to need it (see the do-while constant-false case).
  loop_entry->loop_information()->set_stack_check(stack_check);
  CHECK_BAILOUT(Visit(stmt->body()));
}
5142
5143
5144void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
5145 DCHECK(!HasStackOverflow());
5146 DCHECK(current_block() != NULL);
5147 DCHECK(current_block()->HasPredecessor());
5148 DCHECK(current_block() != NULL);
5149 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5150
5151 BreakAndContinueInfo break_info(stmt, scope());
5152 {
5153 BreakAndContinueScope push(&break_info, this);
5154 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5155 }
5156 HBasicBlock* body_exit =
5157 JoinContinue(stmt, current_block(), break_info.continue_block());
5158 HBasicBlock* loop_successor = NULL;
5159 if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
5160 set_current_block(body_exit);
5161 loop_successor = graph()->CreateBasicBlock();
5162 if (stmt->cond()->ToBooleanIsFalse()) {
5163 loop_entry->loop_information()->stack_check()->Eliminate();
5164 Goto(loop_successor);
5165 body_exit = NULL;
5166 } else {
5167 // The block for a true condition, the actual predecessor block of the
5168 // back edge.
5169 body_exit = graph()->CreateBasicBlock();
5170 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
5171 }
5172 if (body_exit != NULL && body_exit->HasPredecessor()) {
5173 body_exit->SetJoinId(stmt->BackEdgeId());
5174 } else {
5175 body_exit = NULL;
5176 }
5177 if (loop_successor->HasPredecessor()) {
5178 loop_successor->SetJoinId(stmt->ExitId());
5179 } else {
5180 loop_successor = NULL;
5181 }
5182 }
5183 HBasicBlock* loop_exit = CreateLoop(stmt,
5184 loop_entry,
5185 body_exit,
5186 loop_successor,
5187 break_info.break_block());
5188 set_current_block(loop_exit);
5189}
5190
5191
5192void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
5193 DCHECK(!HasStackOverflow());
5194 DCHECK(current_block() != NULL);
5195 DCHECK(current_block()->HasPredecessor());
5196 DCHECK(current_block() != NULL);
5197 HBasicBlock* loop_entry = BuildLoopEntry(stmt);
5198
5199 // If the condition is constant true, do not generate a branch.
5200 HBasicBlock* loop_successor = NULL;
5201 if (!stmt->cond()->ToBooleanIsTrue()) {
5202 HBasicBlock* body_entry = graph()->CreateBasicBlock();
5203 loop_successor = graph()->CreateBasicBlock();
5204 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
5205 if (body_entry->HasPredecessor()) {
5206 body_entry->SetJoinId(stmt->BodyId());
5207 set_current_block(body_entry);
5208 }
5209 if (loop_successor->HasPredecessor()) {
5210 loop_successor->SetJoinId(stmt->ExitId());
5211 } else {
5212 loop_successor = NULL;
5213 }
5214 }
5215
5216 BreakAndContinueInfo break_info(stmt, scope());
5217 if (current_block() != NULL) {
5218 BreakAndContinueScope push(&break_info, this);
5219 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
5220 }
5221 HBasicBlock* body_exit =
5222 JoinContinue(stmt, current_block(), break_info.continue_block());
5223 HBasicBlock* loop_exit = CreateLoop(stmt,
5224 loop_entry,
5225 body_exit,
5226 loop_successor,
5227 break_info.break_block());
5228 set_current_block(loop_exit);
5229}
5230
5231
// Builds graph for a C-style for loop: init runs once before the loop
// header, the (optional) condition branches at the header, and the
// (optional) next-expression runs at the end of each surviving body.
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  // CHECK_ALIVE above guarantees the block is still live here.
  DCHECK(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // A missing condition means an always-true loop: no branch is generated.
  HBasicBlock* loop_successor = NULL;
  if (stmt->cond() != NULL) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      loop_successor = NULL;
    }
  }

  BreakAndContinueInfo break_info(stmt, scope());
  if (current_block() != NULL) {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  // The next-expression executes only if the body can fall through (or a
  // continue reached the join).
  if (stmt->next() != NULL && body_exit != NULL) {
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
5279
5280
// Builds graph for a for-in loop. Bails out of optimization for cases this
// path does not support (flag disabled, or a non-stack-local 'each'
// variable). For-in over undefined/null performs no iterations, which is
// handled here by deoptimizing rather than emitting a skip path.
void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (!FLAG_optimize_for_in) {
    return Bailout(kForInStatementOptimizationIsDisabled);
  }

  if (!stmt->each()->IsVariableProxy() ||
      !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
    return Bailout(kForInStatementWithNonLocalEachVariable);
  }

  Variable* each_var = stmt->each()->AsVariableProxy()->var();

  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
  HValue* enumerable = Top();  // Leave enumerable at the top.

  IfBuilder if_undefined_or_null(this);
  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
      enumerable, graph()->GetConstantUndefined());
  if_undefined_or_null.Or();
  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
      enumerable, graph()->GetConstantNull());
  if_undefined_or_null.ThenDeopt(Deoptimizer::kUndefinedOrNullInForIn);
  if_undefined_or_null.End();
  BuildForInBody(stmt, each_var, enumerable);
}
5310
5311
// Builds the body of a for-in loop. The loop state lives on the expression
// stack in a fixed layout (from bottom: enumerable, map, key array, key
// count, current index) so that OSR entry can reconstruct it. The "fast"
// path iterates the map's enum cache and deopts if the map changes; the
// slow path calls into the runtime to collect and filter property names.
void HOptimizedGraphBuilder::BuildForInBody(ForInStatement* stmt,
                                            Variable* each_var,
                                            HValue* enumerable) {
  HInstruction* map;
  HInstruction* array;
  HInstruction* enum_length;
  bool fast = stmt->for_in_type() == ForInStatement::FAST_FOR_IN;
  if (fast) {
    // Fast path: use the enum cache attached to the enumerable's map.
    map = Add<HForInPrepareMap>(enumerable);
    Add<HSimulate>(stmt->PrepareId());

    array = Add<HForInCacheArray>(enumerable, map,
                                  DescriptorArray::kEnumCacheBridgeCacheIndex);
    enum_length = Add<HMapEnumLength>(map);

    HInstruction* index_cache = Add<HForInCacheArray>(
        enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
    HForInCacheArray::cast(array)
        ->set_index_cache(HForInCacheArray::cast(index_cache));
  } else {
    // Slow path: ask the runtime for the property names array.
    Add<HSimulate>(stmt->PrepareId());
    {
      NoObservableSideEffectsScope no_effects(this);
      BuildJSObjectCheck(enumerable, 0);
    }
    Add<HSimulate>(stmt->ToObjectId());

    // No map check is needed on this path; a dummy constant fills the
    // map slot in the loop state.
    map = graph()->GetConstant1();
    Runtime::FunctionId function_id = Runtime::kGetPropertyNamesFast;
    Add<HPushArguments>(enumerable);
    array = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
    Push(array);
    Add<HSimulate>(stmt->EnumId());
    Drop(1);
    Handle<Map> array_map = isolate()->factory()->fixed_array_map();
    HValue* check = Add<HCheckMaps>(array, array_map);
    enum_length = AddLoadFixedArrayLength(array, check);
  }

  HInstruction* start_index = Add<HConstant>(0);

  // Push the loop state; together with the already-pushed enumerable this
  // is the 5-slot layout read back below via ExpressionStackAt().
  Push(map);
  Push(array);
  Push(enum_length);
  Push(start_index);

  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  // Reload the values to ensure we have up-to-date values inside of the loop.
  // This is relevant especially for OSR where the values don't come from the
  // computation above, but from the OSR entry block.
  enumerable = environment()->ExpressionStackAt(4);
  HValue* index = environment()->ExpressionStackAt(0);
  HValue* limit = environment()->ExpressionStackAt(1);

  // Check that we still have more keys.
  HCompareNumericAndBranch* compare_index =
      New<HCompareNumericAndBranch>(index, limit, Token::LT);
  compare_index->set_observed_input_representation(
      Representation::Smi(), Representation::Smi());

  HBasicBlock* loop_body = graph()->CreateBasicBlock();
  HBasicBlock* loop_successor = graph()->CreateBasicBlock();

  compare_index->SetSuccessorAt(0, loop_body);
  compare_index->SetSuccessorAt(1, loop_successor);
  FinishCurrentBlock(compare_index);

  // Loop exit: discard the 5 loop-state slots.
  set_current_block(loop_successor);
  Drop(5);

  set_current_block(loop_body);

  HValue* key =
      Add<HLoadKeyed>(environment()->ExpressionStackAt(2),  // Enum cache.
                      index, index, nullptr, FAST_ELEMENTS);

  if (fast) {
    // Check if the expected map still matches that of the enumerable.
    // If not just deoptimize.
    Add<HCheckMapValue>(enumerable, environment()->ExpressionStackAt(3));
    Bind(each_var, key);
  } else {
    // Slow path: filter the key through the runtime (it may have been
    // deleted or shadowed since the name array was built).
    Add<HPushArguments>(enumerable, key);
    Runtime::FunctionId function_id = Runtime::kForInFilter;
    key = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 2);
    Push(key);
    Add<HSimulate>(stmt->FilterId());
    key = Pop();
    Bind(each_var, key);
    IfBuilder if_undefined(this);
    if_undefined.If<HCompareObjectEqAndBranch>(key,
                                               graph()->GetConstantUndefined());
    if_undefined.ThenDeopt(Deoptimizer::kUndefined);
    if_undefined.End();
    Add<HSimulate>(stmt->AssignmentId());
  }

  // 'break' out of this loop must drop the 5 loop-state slots.
  BreakAndContinueInfo break_info(stmt, scope(), 5);
  {
    BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry));
  }

  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (body_exit != NULL) {
    set_current_block(body_exit);

    // Advance the loop index for the back edge.
    HValue* current_index = Pop();
    Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());

  set_current_block(loop_exit);
}
5435
5436
// for-of statements are not supported by this optimizing compiler; abort
// optimization so execution falls back to unoptimized code.
void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kForOfStatement);
}
5443
5444
// try/catch is not supported by this optimizing compiler; abort
// optimization so execution falls back to unoptimized code.
void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kTryCatchStatement);
}
5451
5452
// try/finally is not supported by this optimizing compiler; abort
// optimization so execution falls back to unoptimized code.
void HOptimizedGraphBuilder::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kTryFinallyStatement);
}
5460
5461
// debugger statements are not supported by this optimizing compiler; abort
// optimization so execution falls back to unoptimized code.
void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kDebuggerStatement);
}
5468
5469
// Case clauses are handled inline by VisitSwitchStatement and are never
// visited through the generic dispatcher.
void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
5473
5474
// Builds graph for a function literal (closure creation). Compiles the
// nested function's SharedFunctionInfo, then allocates the closure either
// via the fast stub (new-space, no literals, not pretenured) or a runtime
// call.
void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Handle<SharedFunctionInfo> shared_info = Compiler::GetSharedFunctionInfo(
      expr, current_info()->script(), top_info());
  // We also have a stack overflow if the recursive compilation did.
  if (HasStackOverflow()) return;
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  HConstant* shared_info_value = Add<HConstant>(shared_info);
  HInstruction* instr;
  if (!expr->pretenure() && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), shared_info->language_mode(),
                            shared_info->kind());
    FastNewClosureDescriptor descriptor(isolate());
    HValue* values[] = {context(), shared_info_value};
    HConstant* stub_value = Add<HConstant>(stub.GetCode());
    instr = New<HCallWithDescriptor>(stub_value, 0, descriptor,
                                     Vector<HValue*>(values, arraysize(values)),
                                     NORMAL_CALL);
  } else {
    // Runtime fallback; pretenured closures go to old space.
    Add<HPushArguments>(shared_info_value);
    Runtime::FunctionId function_id =
        expr->pretenure() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
    instr = New<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
  }
  return ast_context()->ReturnInstruction(instr, expr->id());
}
5504
5505
// Class literals are not supported by Crankshaft; bail out.
void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kClassLiteral);
}
5512
5513
// Native function literals are not supported by Crankshaft; bail out.
void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kNativeFunctionLiteral);
}
5521
5522
// do-expressions are not supported by Crankshaft; bail out.
void HOptimizedGraphBuilder::VisitDoExpression(DoExpression* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kDoExpression);
}
5529
5530
// Compiles the ternary conditional operator.  The condition is evaluated
// for control into two fresh blocks; each arm is then visited in the
// surrounding AST context so its value (if any) flows out of the join.
void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  HBasicBlock* cond_true = graph()->CreateBasicBlock();
  HBasicBlock* cond_false = graph()->CreateBasicBlock();
  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));

  // Visit the true and false subexpressions in the same AST context as the
  // whole expression.
  if (cond_true->HasPredecessor()) {
    cond_true->SetJoinId(expr->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(expr->then_expression()));
    cond_true = current_block();
  } else {
    // The then-arm turned out to be unreachable.
    cond_true = NULL;
  }

  if (cond_false->HasPredecessor()) {
    cond_false->SetJoinId(expr->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(expr->else_expression()));
    cond_false = current_block();
  } else {
    // The else-arm turned out to be unreachable.
    cond_false = NULL;
  }

  // In a test context the arms have already branched to their targets;
  // otherwise join the arms and hand the pushed value to the context.
  if (!ast_context()->IsTest()) {
    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
    set_current_block(join);
    if (join != NULL && !ast_context()->IsEffect()) {
      return ast_context()->ReturnValue(Pop());
    }
  }
}
5567
5568
5569HOptimizedGraphBuilder::GlobalPropertyAccess
5570HOptimizedGraphBuilder::LookupGlobalProperty(Variable* var, LookupIterator* it,
5571 PropertyAccessType access_type) {
5572 if (var->is_this() || !current_info()->has_global_object()) {
5573 return kUseGeneric;
5574 }
5575
5576 switch (it->state()) {
5577 case LookupIterator::ACCESSOR:
5578 case LookupIterator::ACCESS_CHECK:
5579 case LookupIterator::INTERCEPTOR:
5580 case LookupIterator::INTEGER_INDEXED_EXOTIC:
5581 case LookupIterator::NOT_FOUND:
5582 return kUseGeneric;
5583 case LookupIterator::DATA:
5584 if (access_type == STORE && it->IsReadOnly()) return kUseGeneric;
5585 return kUseCell;
5586 case LookupIterator::JSPROXY:
5587 case LookupIterator::TRANSITION:
5588 UNREACHABLE();
5589 }
5590 UNREACHABLE();
5591 return kUseGeneric;
5592}
5593
5594
5595HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
5596 DCHECK(var->IsContextSlot());
5597 HValue* context = environment()->context();
5598 int length = scope()->ContextChainLength(var->scope());
5599 while (length-- > 0) {
5600 context = Add<HLoadNamedField>(
5601 context, nullptr,
5602 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
5603 }
5604 return context;
5605}
5606
5607
// Compiles a variable reference.  Dispatches on the variable's storage
// location: global object (constant folding, script contexts, property
// cells, or a generic load), stack slot, context slot, or dynamic
// lookup (bailout).
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an DCHECK?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      Handle<JSGlobalObject> global(current_info()->global_object());

      // Lookup in script contexts.
      {
        Handle<ScriptContextTable> script_contexts(
            global->native_context()->script_context_table());
        ScriptContextTable::LookupResult lookup;
        if (ScriptContextTable::Lookup(script_contexts, variable->name(),
                                       &lookup)) {
          Handle<Context> script_context = ScriptContextTable::GetContext(
              script_contexts, lookup.context_index);
          Handle<Object> current_value =
              FixedArray::get(script_context, lookup.slot_index);

          // If the values is not the hole, it will stay initialized,
          // so no need to generate a check.
          if (*current_value == *isolate()->factory()->the_hole_value()) {
            return Bailout(kReferenceToUninitializedVariable);
          }
          HInstruction* result = New<HLoadNamedField>(
              Add<HConstant>(script_context), nullptr,
              HObjectAccess::ForContextSlot(lookup.slot_index));
          return ast_context()->ReturnInstruction(result, expr->id());
        }
      }

      LookupIterator it(global, variable->name(), LookupIterator::OWN);
      GlobalPropertyAccess type = LookupGlobalProperty(variable, &it, LOAD);

      if (type == kUseCell) {
        // Load through the property cell; register a dependency so the
        // code deoptimizes if the cell changes state.
        Handle<PropertyCell> cell = it.GetPropertyCell();
        top_info()->dependencies()->AssumePropertyCell(cell);
        auto cell_type = it.property_details().cell_type();
        if (cell_type == PropertyCellType::kConstant ||
            cell_type == PropertyCellType::kUndefined) {
          // The cell's value is known constant: embed it directly.
          Handle<Object> constant_object(cell->value(), isolate());
          if (constant_object->IsConsString()) {
            constant_object =
                String::Flatten(Handle<String>::cast(constant_object));
          }
          HConstant* constant = New<HConstant>(constant_object);
          return ast_context()->ReturnInstruction(constant, expr->id());
        } else {
          auto access = HObjectAccess::ForPropertyCellValue();
          UniqueSet<Map>* field_maps = nullptr;
          if (cell_type == PropertyCellType::kConstantType) {
            switch (cell->GetConstantType()) {
              case PropertyCellConstantType::kSmi:
                access = access.WithRepresentation(Representation::Smi());
                break;
              case PropertyCellConstantType::kStableMap: {
                // Check that the map really is stable. The heap object could
                // have mutated without the cell updating state. In that case,
                // make no promises about the loaded value except that it's a
                // heap object.
                access =
                    access.WithRepresentation(Representation::HeapObject());
                Handle<Map> map(HeapObject::cast(cell->value())->map());
                if (map->is_stable()) {
                  field_maps = new (zone())
                      UniqueSet<Map>(Unique<Map>::CreateImmovable(map), zone());
                }
                break;
              }
            }
          }
          HConstant* cell_constant = Add<HConstant>(cell);
          HLoadNamedField* instr;
          if (field_maps == nullptr) {
            instr = New<HLoadNamedField>(cell_constant, nullptr, access);
          } else {
            instr = New<HLoadNamedField>(cell_constant, nullptr, access,
                                         field_maps, HType::HeapObject());
          }
          // The load depends on global variables, not on in-object fields.
          instr->ClearDependsOnFlag(kInobjectFields);
          instr->SetDependsOnFlag(kGlobalVars);
          return ast_context()->ReturnInstruction(instr, expr->id());
        }
      } else {
        // Generic path: load through the LoadGlobalIC with feedback.
        HValue* global_object = Add<HLoadNamedField>(
            BuildGetNativeContext(), nullptr,
            HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
        HLoadGlobalGeneric* instr = New<HLoadGlobalGeneric>(
            global_object, variable->name(), ast_context()->typeof_mode());
        instr->SetVectorAndSlot(handle(current_feedback_vector(), isolate()),
                                expr->VariableFeedbackSlot());
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      // Stack-allocated variable: read it from the Hydrogen environment.
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        DCHECK(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case VariableLocation::CONTEXT: {
      // Context-allocated variable: walk to the owning context, then load
      // the slot with a hole check appropriate for the variable's mode.
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot::Mode mode;
      switch (variable->mode()) {
        case LET:
        case CONST:
          mode = HLoadContextSlot::kCheckDeoptimize;
          break;
        case CONST_LEGACY:
          mode = HLoadContextSlot::kCheckReturnUndefined;
          break;
        default:
          mode = HLoadContextSlot::kNoCheck;
          break;
      }
      HLoadContextSlot* instr =
          new(zone()) HLoadContextSlot(context, variable->index(), mode);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case VariableLocation::LOOKUP:
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
  }
}
5754
5755
// Compiles a literal expression by materializing its value as a constant.
void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  HConstant* instr = New<HConstant>(expr->value());
  return ast_context()->ReturnInstruction(instr, expr->id());
}
5763
5764
// Compiles a regexp literal by calling the FastCloneRegExp stub with the
// closure, literal index, pattern and flags.
void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Callable callable = CodeFactory::FastCloneRegExp(isolate());
  HValue* values[] = {
      context(), AddThisFunction(), Add<HConstant>(expr->literal_index()),
      Add<HConstant>(expr->pattern()), Add<HConstant>(expr->flags())};
  HConstant* stub_value = Add<HConstant>(callable.code());
  HInstruction* instr = New<HCallWithDescriptor>(
      stub_value, 0, callable.descriptor(),
      Vector<HValue*>(values, arraysize(values)), NORMAL_CALL);
  return ast_context()->ReturnInstruction(instr, expr->id());
}
5779
5780
5781static bool CanInlinePropertyAccess(Handle<Map> map) {
5782 if (map->instance_type() == HEAP_NUMBER_TYPE) return true;
5783 if (map->instance_type() < FIRST_NONSTRING_TYPE) return true;
5784 return map->IsJSObjectMap() && !map->is_dictionary_map() &&
5785 !map->has_named_interceptor() &&
5786 // TODO(verwaest): Whitelist contexts to which we have access.
5787 !map->is_access_check_needed();
5788}
5789
5790
// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
//
// |max_depth| bounds recursion through nested object values; |*max_properties|
// is a shared budget that is decremented for every element/field visited
// across the whole recursion.  Returns false as soon as either limit is
// exhausted or an unsupported shape is encountered.
static bool IsFastLiteral(Handle<JSObject> boilerplate,
                          int max_depth,
                          int* max_properties) {
  // Deprecated maps must be migrated first; if migration fails, give up.
  if (boilerplate->map()->is_deprecated() &&
      !JSObject::TryMigrateInstance(boilerplate)) {
    return false;
  }

  DCHECK(max_depth >= 0 && *max_properties >= 0);
  if (max_depth == 0) return false;

  Isolate* isolate = boilerplate->GetIsolate();
  Handle<FixedArrayBase> elements(boilerplate->elements());
  // COW element backing stores are shared and need no per-element check.
  if (elements->length() > 0 &&
      elements->map() != isolate->heap()->fixed_cow_array_map()) {
    if (boilerplate->HasFastSmiOrObjectElements()) {
      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
      int length = elements->length();
      for (int i = 0; i < length; i++) {
        if ((*max_properties)-- == 0) return false;
        Handle<Object> value(fast_elements->get(i), isolate);
        if (value->IsJSObject()) {
          // Recurse into nested object elements with a reduced depth budget.
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          if (!IsFastLiteral(value_object,
                             max_depth - 1,
                             max_properties)) {
            return false;
          }
        }
      }
    } else if (!boilerplate->HasFastDoubleElements()) {
      // Only smi/object and double element kinds are supported.
      return false;
    }
  }

  // Out-of-object properties disqualify the boilerplate; otherwise walk
  // the in-object data fields via the descriptor array.
  Handle<FixedArray> properties(boilerplate->properties());
  if (properties->length() > 0) {
    return false;
  } else {
    Handle<DescriptorArray> descriptors(
        boilerplate->map()->instance_descriptors());
    int limit = boilerplate->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.type() != DATA) continue;
      if ((*max_properties)-- == 0) return false;
      FieldIndex field_index = FieldIndex::ForDescriptor(boilerplate->map(), i);
      // Unboxed double fields hold no heap object to recurse into.
      if (boilerplate->IsUnboxedDoubleField(field_index)) continue;
      Handle<Object> value(boilerplate->RawFastPropertyAt(field_index),
                           isolate);
      if (value->IsJSObject()) {
        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
        if (!IsFastLiteral(value_object,
                           max_depth - 1,
                           max_properties)) {
          return false;
        }
      }
    }
  }
  return true;
}
5856
5857
// Compiles an object literal: clones the boilerplate (inline fast-copy or
// via the runtime), then evaluates and stores each non-compile-time
// property into the fresh object.
void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  HInstruction* literal;

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  Handle<Object> literals_cell(
      closure->literals()->literal(expr->literal_index()), isolate());
  Handle<AllocationSite> site;
  Handle<JSObject> boilerplate;
  if (!literals_cell->IsUndefined()) {
    // Retrieve the boilerplate
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
                                   isolate());
  }

  if (!boilerplate.is_null() &&
      IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
    // Fast path: emit an inline deep copy of the boilerplate.
    AllocationSiteUsageContext site_context(isolate(), site, false);
    site_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate, &site_context);
    site_context.ExitScope(site, boilerplate);
  } else {
    // Slow path: create the literal in the runtime.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> constant_properties = expr->constant_properties();
    int literal_index = expr->literal_index();
    int flags = expr->ComputeFlags(true);

    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
                        Add<HConstant>(constant_properties),
                        Add<HConstant>(flags));

    Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);
  }

  // The object is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->is_computed_name()) return Bailout(kComputedPropertyName);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();

    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            CHECK_ALIVE(VisitForValue(value));
            HValue* value = Pop();

            Handle<Map> map = property->GetReceiverType();
            Handle<String> name = key->AsPropertyName();
            HValue* store;
            FeedbackVectorSlot slot = property->GetSlot();
            if (map.is_null()) {
              // If we don't know the monomorphic type, do a generic store.
              CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot, literal,
                                                    name, value));
            } else {
              PropertyAccessInfo info(this, STORE, map, name);
              if (info.CanAccessMonomorphic()) {
                // Known receiver map: check the map and store directly.
                HValue* checked_literal = Add<HCheckMaps>(literal, map);
                DCHECK(!info.IsAccessorConstant());
                store = BuildMonomorphicAccess(
                    &info, literal, checked_literal, value,
                    BailoutId::None(), BailoutId::None());
              } else {
                CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot,
                                                      literal, name, value));
              }
            }
            if (store->IsInstruction()) {
              AddInstruction(HInstruction::cast(store));
            }
            DCHECK(store->HasObservableSideEffects());
            Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);

            // Add [[HomeObject]] to function literals.
            if (FunctionLiteral::NeedsHomeObject(property->value())) {
              Handle<Symbol> sym = isolate()->factory()->home_object_symbol();
              HInstruction* store_home = BuildNamedGeneric(
                  STORE, NULL, property->GetSlot(1), value, sym, literal);
              AddInstruction(store_home);
              DCHECK(store_home->HasObservableSideEffects());
              Add<HSimulate>(property->value()->id(), REMOVABLE_SIMULATE);
            }
          } else {
            // Duplicate key whose store is elided: evaluate for effect only.
            CHECK_ALIVE(VisitForEffect(value));
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        return Bailout(kObjectLiteralWithComplexProperty);
      default: UNREACHABLE();
    }
  }

  if (expr->has_function()) {
    // Return the result of the transformation to fast properties
    // instead of the original since this operation changes the map
    // of the object. This makes sure that the original object won't
    // be used by other optimized code before it is transformed
    // (e.g. because of code motion).
    HToFastProperties* result = Add<HToFastProperties>(Pop());
    return ast_context()->ReturnValue(result);
  } else {
    return ast_context()->ReturnValue(Pop());
  }
}
5984
5985
// Compiles an array literal: creates (or reuses) the boilerplate and its
// allocation site, clones it (inline fast-copy or via the runtime), then
// evaluates and stores each non-compile-time element.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  Handle<AllocationSite> site;
  Handle<LiteralsArray> literals(environment()->closure()->literals(),
                                 isolate());
  bool uninitialized = false;
  Handle<Object> literals_cell(literals->literal(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  if (literals_cell->IsUndefined()) {
    // First execution: create the boilerplate and allocation site now.
    uninitialized = true;
    Handle<Object> raw_boilerplate;
    ASSIGN_RETURN_ON_EXCEPTION_VALUE(
        isolate(), raw_boilerplate,
        Runtime::CreateArrayLiteralBoilerplate(
            isolate(), literals, expr->constant_elements(),
            is_strong(function_language_mode())),
        Bailout(kArrayBoilerplateCreationFailed));

    boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
    AllocationSiteCreationContext creation_context(isolate());
    site = creation_context.EnterNewScope();
    if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }
    creation_context.ExitScope(site, boilerplate_object);
    literals->set_literal(expr->literal_index(), *site);

    if (boilerplate_object->elements()->map() ==
        isolate()->heap()->fixed_cow_array_map()) {
      isolate()->counters()->cow_arrays_created_runtime()->Increment();
    }
  } else {
    // Reuse the existing allocation site and its boilerplate.
    DCHECK(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  DCHECK(!boilerplate_object.is_null());
  DCHECK(site->SitePointsToLiteral());

  ElementsKind boilerplate_elements_kind =
      boilerplate_object->GetElementsKind();

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  if (IsFastLiteral(boilerplate_object,
                    kMaxFastLiteralDepth,
                    &max_properties)) {
    // Fast path: emit an inline deep copy of the boilerplate.
    AllocationSiteUsageContext site_context(isolate(), site, false);
    site_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &site_context);
    site_context.ExitScope(site, boilerplate_object);
  } else {
    NoObservableSideEffectsScope no_effects(this);
    // Boilerplate already exists and constant elements are never accessed,
    // pass an empty fixed array to the runtime function instead.
    Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
    int literal_index = expr->literal_index();
    int flags = expr->ComputeFlags(true);

    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
                        Add<HConstant>(constants), Add<HConstant>(flags));

    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);

    // Register to deopt if the boilerplate ElementsKind changes.
    top_info()->dependencies()->AssumeTransitionStable(site);
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    if (subexpr->IsSpread()) {
      return Bailout(kSpread);
    }

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    // Reload the backing store after each element; the store above may
    // have observable side effects.
    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    switch (boilerplate_elements_kind) {
      case FAST_SMI_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS: {
        HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value, nullptr,
                                              boilerplate_elements_kind);
        instr->SetUninitialized(uninitialized);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  return ast_context()->ReturnValue(Pop());
}
6110
6111
// Adds a heap-object (non-smi) check followed by a map check against
// |map| for |object|, and returns the map-check instruction.
HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
                                                Handle<Map> map) {
  BuildCheckHeapObject(object);
  return Add<HCheckMaps>(object, map);
}
6117
6118
// Builds the load of a named data field described by |info| from
// |checked_object|.  Constant-folds loads of read-only non-configurable
// properties on constant receivers, unwraps boxed doubles, and attaches
// stable field maps when known.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object) {
  // See if this is a load for an immutable property
  if (checked_object->ActualValue()->IsConstant()) {
    Handle<Object> object(
        HConstant::cast(checked_object->ActualValue())->handle(isolate()));

    if (object->IsJSObject()) {
      LookupIterator it(object, info->name(),
                        LookupIterator::OWN_SKIP_INTERCEPTOR);
      Handle<Object> value = JSReceiver::GetDataProperty(&it);
      // Read-only and non-configurable means the value can never change.
      if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
        return New<HConstant>(value);
      }
    }
  }

  HObjectAccess access = info->access();
  if (access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !access.IsInobject())) {
    // Load the heap number.
    checked_object = Add<HLoadNamedField>(
        checked_object, nullptr,
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }

  SmallMapList* map_list = info->field_maps();
  if (map_list->length() == 0) {
    return New<HLoadNamedField>(checked_object, checked_object, access);
  }

  // Attach the set of possible (stable) field maps to the load.
  UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
  for (int i = 0; i < map_list->length(); ++i) {
    maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
  }
  return New<HLoadNamedField>(
      checked_object, checked_object, access, maps, info->field_type());
}
6160
6161
// Builds the store of |value| into the named data field described by
// |info| on |checked_object|.  Handles boxed doubles (allocating a
// mutable HeapNumber on field transitions), value map checks for
// heap-object fields, and map transitions.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object,
    HValue* value) {
  bool transition_to_field = info->IsTransition();
  // TODO(verwaest): Move this logic into PropertyAccessInfo.
  HObjectAccess field_access = info->access();

  HStoreNamedField *instr;
  if (field_access.representation().IsDouble() &&
      (!FLAG_unbox_double_fields || !field_access.IsInobject())) {
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

      // TODO(hpayer): Allocation site pretenuring support.
      HInstruction* heap_number = Add<HAllocate>(heap_number_size,
                                                 HType::HeapObject(),
                                                 NOT_TENURED,
                                                 MUTABLE_HEAP_NUMBER_TYPE);
      AddStoreMapConstant(
          heap_number, isolate()->factory()->mutable_heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      // Store the freshly allocated box into the field itself.
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      HInstruction* heap_number =
          Add<HLoadNamedField>(checked_object, nullptr, heap_number_access);
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value, STORE_TO_INITIALIZED_ENTRY);
    }
  } else {
    if (field_access.representation().IsHeapObject()) {
      // A heap-object field must never receive a smi.
      BuildCheckHeapObject(value);
    }

    if (!info->field_maps()->is_empty()) {
      DCHECK(field_access.representation().IsHeapObject());
      value = Add<HCheckMaps>(value, info->field_maps());
    }

    // This is a normal store.
    instr = New<HStoreNamedField>(
        checked_object->ActualValue(), field_access, value,
        transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
  }

  if (transition_to_field) {
    // Attach the target map so the store also performs the transition.
    Handle<Map> transition(info->transition());
    DCHECK(!transition->is_deprecated());
    instr->SetTransition(Add<HConstant>(transition));
  }
  return instr;
}
6223
6224
// Checks whether |info|'s access can be merged with this one into a
// single polymorphic case.  On success, this object's field maps,
// representation and field type are merged into |info|.
bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
    PropertyAccessInfo* info) {
  if (!CanInlinePropertyAccess(map_)) return false;

  // Currently only handle Type::Number as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Values are only compatible for monomorphic load if they all behave the same
  // regarding value wrappers.
  if (IsValueWrapped() != info->IsValueWrapped()) return false;

  if (!LookupDescriptor()) return false;

  if (!IsFound()) {
    // Both must miss on the receiver and share the same prototype so the
    // prototype-chain lookup agrees.
    return (!info->IsFound() || info->has_holder()) &&
        map()->prototype() == info->map()->prototype();
  }

  // Mismatch if the other access info found the property in the prototype
  // chain.
  if (info->has_holder()) return false;

  if (IsAccessorConstant()) {
    return accessor_.is_identical_to(info->accessor_) &&
        api_holder_.is_identical_to(info->api_holder_);
  }

  if (IsDataConstant()) {
    return constant_.is_identical_to(info->constant_);
  }

  DCHECK(IsData());
  if (!info->IsData()) return false;

  Representation r = access_.representation();
  if (IsLoad()) {
    if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
  } else {
    if (!info->access_.representation().IsCompatibleForStore(r)) return false;
  }
  if (info->access_.offset() != access_.offset()) return false;
  if (info->access_.IsInobject() != access_.IsInobject()) return false;
  if (IsLoad()) {
    // For loads the merged field-map set is the union; an empty set on
    // either side means "unknown" and clears the merged set.
    if (field_maps_.is_empty()) {
      info->field_maps_.Clear();
    } else if (!info->field_maps_.is_empty()) {
      for (int i = 0; i < field_maps_.length(); ++i) {
        info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
      }
      info->field_maps_.Sort();
    }
  } else {
    // We can only merge stores that agree on their field maps. The comparison
    // below is safe, since we keep the field maps sorted.
    if (field_maps_.length() != info->field_maps_.length()) return false;
    for (int i = 0; i < field_maps_.length(); ++i) {
      if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
        return false;
      }
    }
  }
  info->GeneralizeRepresentation(r);
  info->field_type_ = info->field_type_.Combine(field_type_);
  return true;
}
6292
6293
// Looks up name_ in map_'s own descriptors and loads the result details.
// Non-JSObject maps (primitive value maps) have no own descriptors to
// search, so they trivially succeed.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
  if (!map_->IsJSObjectMap()) return true;
  LookupDescriptor(*map_, *name_);
  return LoadResult(map_);
}
6299
6300
// Extracts the details of the looked-up property from |map| into this
// access info: the field access (and field maps) for data properties,
// the accessor function for constant accessors, or the constant value.
// Returns false when the access cannot be handled (e.g. a store to a
// read-only property or a non-function accessor).
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
  if (!IsLoad() && IsProperty() && IsReadOnly()) {
    return false;
  }

  if (IsData()) {
    // Construct the object field access.
    int index = GetLocalFieldIndexFromMap(map);
    access_ = HObjectAccess::ForField(map, index, representation(), name_);

    // Load field map for heap objects.
    return LoadFieldMaps(map);
  } else if (IsAccessorConstant()) {
    Handle<Object> accessors = GetAccessorsFromMap(map);
    if (!accessors->IsAccessorPair()) return false;
    // Pick the getter for loads, the setter for stores.
    Object* raw_accessor =
        IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
                 : Handle<AccessorPair>::cast(accessors)->setter();
    if (!raw_accessor->IsJSFunction()) return false;
    Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor));
    if (accessor->shared()->IsApiFunction()) {
      // API accessors may allow a direct call through their holder.
      CallOptimization call_optimization(accessor);
      if (call_optimization.is_simple_api_call()) {
        CallOptimization::HolderLookup holder_lookup;
        api_holder_ =
            call_optimization.LookupHolderOfExpectedType(map_, &holder_lookup);
      }
    }
    accessor_ = accessor;
  } else if (IsDataConstant()) {
    constant_ = GetConstantFromMap(map);
  }

  return true;
}
6336
6337
// Collects the set of stable maps the field may hold (from the field's
// declared HeapType) into field_maps_ and derives field_type_ from it.
// Falls back to an empty set if any candidate map is unstable.  Returns
// false only for stores to a field whose type was cleared.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
    Handle<Map> map) {
  // Clear any previously collected field maps/type.
  field_maps_.Clear();
  field_type_ = HType::Tagged();

  // Figure out the field type from the accessor map.
  Handle<HeapType> field_type = GetFieldTypeFromMap(map);

  // Collect the (stable) maps from the field type.
  int num_field_maps = field_type->NumClasses();
  if (num_field_maps > 0) {
    DCHECK(access_.representation().IsHeapObject());
    field_maps_.Reserve(num_field_maps, zone());
    HeapType::Iterator<Map> it = field_type->Classes();
    while (!it.Done()) {
      Handle<Map> field_map = it.Current();
      if (!field_map->is_stable()) {
        // One unstable map invalidates the whole set.
        field_maps_.Clear();
        break;
      }
      field_maps_.Add(field_map, zone());
      it.Advance();
    }
  }

  if (field_maps_.is_empty()) {
    // Store is not safe if the field map was cleared.
    return IsLoad() || !field_type->Is(HeapType::None());
  }

  field_maps_.Sort();
  DCHECK_EQ(num_field_maps, field_maps_.length());

  // Determine field HType from field HeapType.
  field_type_ = HType::FromType<HeapType>(field_type);
  DCHECK(field_type_.IsHeapObject());

  // Add dependency on the map that introduced the field.
  top_info()->dependencies()->AssumeFieldType(GetFieldOwnerFromMap(map));
  return true;
}
6380
6381
// Walks the prototype chain looking for |name_|. On a hit, holder_ is the
// prototype that owns the property and the result is converted via
// LoadResult. Returns false when a prototype map cannot be inlined, or when
// the chain ends at a non-JSObject JSReceiver (lookup cannot be concluded
// statically there).
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
  Handle<Map> map = this->map();

  while (map->prototype()->IsJSObject()) {
    holder_ = handle(JSObject::cast(map->prototype()));
    if (holder_->map()->is_deprecated()) {
      // Try to bring instances with deprecated maps up to date before
      // inspecting their layout.
      JSObject::TryMigrateInstance(holder_);
    }
    map = Handle<Map>(holder_->map());
    if (!CanInlinePropertyAccess(map)) {
      NotFound();
      return false;
    }
    LookupDescriptor(*map, *name_);
    if (IsFound()) return LoadResult(map);
  }

  NotFound();
  return !map->prototype()->IsJSReceiver();
}
6402
6403
6404bool HOptimizedGraphBuilder::PropertyAccessInfo::IsIntegerIndexedExotic() {
6405 InstanceType instance_type = map_->instance_type();
6406 return instance_type == JS_TYPED_ARRAY_TYPE && name_->IsString() &&
6407 IsSpecialIndex(isolate()->unicode_cache(), String::cast(*name_));
6408}
6409
6410
// Decides whether this load or store can be compiled against the single map
// map_. Checks, in order: inlinability, special field accessors, function
// "prototype" loads, own properties, integer-indexed exotic names, prototype
// chain hits and, for stores of new properties, a data-field map transition.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
  if (!CanInlinePropertyAccess(map_)) return false;
  if (IsJSObjectFieldAccessor()) return IsLoad();
  if (IsJSArrayBufferViewFieldAccessor()) return IsLoad();
  // "prototype" of a constructor function is only handled as a load.
  if (map_->IsJSFunctionMap() && map_->is_constructor() &&
      !map_->has_non_instance_prototype() &&
      name_.is_identical_to(isolate()->factory()->prototype_string())) {
    return IsLoad();
  }
  if (!LookupDescriptor()) return false;
  if (IsFound()) return IsLoad() || !IsReadOnly();
  if (IsIntegerIndexedExotic()) return false;
  if (!LookupInPrototypes()) return false;
  if (IsLoad()) return true;

  if (IsAccessorConstant()) return true;
  // Store of a property that does not exist yet: look for a map transition
  // that adds it as a data field.
  LookupTransition(*map_, *name_, NONE);
  if (IsTransitionToData() && map_->unused_property_fields() > 0) {
    // Construct the object field access.
    int descriptor = transition()->LastAdded();
    int index =
        transition()->instance_descriptors()->GetFieldIndex(descriptor) -
        map_->GetInObjectProperties();
    PropertyDetails details =
        transition()->instance_descriptors()->GetDetails(descriptor);
    Representation representation = details.representation();
    access_ = HObjectAccess::ForField(map_, index, representation, name_);

    // Load field map for heap objects.
    return LoadFieldMaps(transition());
  }
  return false;
}
6444
6445
// Checks whether an access over several receiver maps (map_ == maps->first())
// can still be compiled as if monomorphic: either every map yields the exact
// same special field access, or each additional map's lookup result is
// compatible with this one.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
    SmallMapList* maps) {
  DCHECK(map_.is_identical_to(maps->first()));
  if (!CanAccessMonomorphic()) return false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  if (maps->length() > kMaxLoadPolymorphism) return false;
  HObjectAccess access = HObjectAccess::ForMap(); // bogus default
  if (GetJSObjectFieldAccess(&access)) {
    // Every map must agree on the identical JSObject field access.
    for (int i = 1; i < maps->length(); ++i) {
      PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
      HObjectAccess test_access = HObjectAccess::ForMap(); // bogus default
      if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
      if (!access.Equals(test_access)) return false;
    }
    return true;
  }
  if (GetJSArrayBufferViewFieldAccess(&access)) {
    // Same agreement requirement for array-buffer-view field accesses.
    for (int i = 1; i < maps->length(); ++i) {
      PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
      HObjectAccess test_access = HObjectAccess::ForMap(); // bogus default
      if (!test_info.GetJSArrayBufferViewFieldAccess(&test_access)) {
        return false;
      }
      if (!access.Equals(test_access)) return false;
    }
    return true;
  }

  // Currently only handle numbers as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (IsNumberType()) return false;

  // Multiple maps cannot transition to the same target map.
  DCHECK(!IsLoad() || !IsTransition());
  if (IsTransition() && maps->length() > 1) return false;

  for (int i = 1; i < maps->length(); ++i) {
    PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
    if (!test_info.IsCompatible(this)) return false;
  }

  return true;
}
6490
6491
6492Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
6493 Handle<JSFunction> ctor;
6494 if (Map::GetConstructorFunction(
6495 map_, handle(current_info()->closure()->context()->native_context()))
6496 .ToHandle(&ctor)) {
6497 return handle(ctor->initial_map());
6498 }
6499 return map_;
6500}
6501
6502
6503static bool NeedsWrapping(Handle<Map> map, Handle<JSFunction> target) {
6504 return !map->IsJSObjectMap() &&
6505 is_sloppy(target->shared()->language_mode()) &&
6506 !target->shared()->native();
6507}
6508
6509
// Returns true if calling |target| as an accessor on this receiver type
// requires wrapping the receiver; delegates to NeedsWrapping with the
// receiver map.
bool HOptimizedGraphBuilder::PropertyAccessInfo::NeedsWrappingFor(
    Handle<JSFunction> target) const {
  return NeedsWrapping(map_, target);
}
6514
6515
// Emits HIR for a single-map (monomorphic) named property access.
// |checked_object| is |object| after its map check; |value| is the value to
// store (stores only). Returns the resulting HValue, or NULL when an
// accessor was handled by inlining (or inlining hit a stack overflow).
HValue* HOptimizedGraphBuilder::BuildMonomorphicAccess(
    PropertyAccessInfo* info, HValue* object, HValue* checked_object,
    HValue* value, BailoutId ast_id, BailoutId return_id,
    bool can_inline_accessor) {
  HObjectAccess access = HObjectAccess::ForMap(); // bogus default
  if (info->GetJSObjectFieldAccess(&access)) {
    DCHECK(info->IsLoad());
    return New<HLoadNamedField>(object, checked_object, access);
  }

  if (info->GetJSArrayBufferViewFieldAccess(&access)) {
    DCHECK(info->IsLoad());
    // Guard against neutered (detached) array buffer views before loading.
    checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);
    return New<HLoadNamedField>(object, checked_object, access);
  }

  // Loading "prototype" from a constructor function uses a dedicated
  // instruction.
  if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
      info->map()->IsJSFunctionMap() && info->map()->is_constructor()) {
    DCHECK(!info->map()->has_non_instance_prototype());
    return New<HLoadFunctionPrototype>(checked_object);
  }

  HValue* checked_holder = checked_object;
  if (info->has_holder()) {
    // The property lives on a prototype: guard the maps along the chain up
    // to the holder.
    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
  }

  if (!info->IsFound()) {
    DCHECK(info->IsLoad());
    if (is_strong(function_language_mode())) {
      return New<HCallRuntime>(
          Runtime::FunctionForId(Runtime::kThrowStrongModeImplicitConversion),
          0);
    } else {
      // A missing property loads undefined.
      return graph()->GetConstantUndefined();
    }
  }

  if (info->IsData()) {
    if (info->IsLoad()) {
      return BuildLoadNamedField(info, checked_holder);
    } else {
      return BuildStoreNamedField(info, checked_object, value);
    }
  }

  if (info->IsTransition()) {
    DCHECK(!info->IsLoad());
    return BuildStoreNamedField(info, checked_object, value);
  }

  if (info->IsAccessorConstant()) {
    // Push the receiver (and, for stores, the value) as call arguments.
    Push(checked_object);
    int argument_count = 1;
    if (!info->IsLoad()) {
      argument_count = 2;
      Push(value);
    }

    if (info->NeedsWrappingFor(info->accessor())) {
      // The receiver needs wrapping: go through a generic function call.
      HValue* function = Add<HConstant>(info->accessor());
      PushArgumentsFromEnvironment(argument_count);
      return New<HCallFunction>(function, argument_count,
                                ConvertReceiverMode::kNotNullOrUndefined);
    } else if (FLAG_inline_accessors && can_inline_accessor) {
      bool success = info->IsLoad()
          ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
          : TryInlineSetter(
              info->accessor(), info->map(), ast_id, return_id, value);
      // NULL signals "handled by inlining" (or a stack-overflow bailout).
      if (success || HasStackOverflow()) return NULL;
    }

    PushArgumentsFromEnvironment(argument_count);
    return BuildCallConstantFunction(info->accessor(), argument_count);
  }

  DCHECK(info->IsDataConstant());
  if (info->IsLoad()) {
    return New<HConstant>(info->constant());
  } else {
    // Storing to a constant property: only verify the stored value matches.
    return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
  }
}
6600
6601
// Emits HIR for a named access whose receiver has several possible maps:
// a chain of map compares, one branch per map handled monomorphically,
// joined at |join|, with a generic IC (or a hard deoptimization) for
// anything left over.
void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    BailoutId ast_id, BailoutId return_id, HValue* object, HValue* value,
    SmallMapList* maps, Handle<Name> name) {
  // Something did not match; must use a polymorphic load.
  int count = 0;
  HBasicBlock* join = NULL;
  HBasicBlock* number_block = NULL;
  bool handled_string = false;

  // First pass: count handleable maps and detect number maps (which would
  // require a smi check on the receiver).
  bool handle_smi = false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  int i;
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    if (info.CanAccessMonomorphic()) {
      count++;
      if (info.IsNumberType()) {
        handle_smi = true;
        break;
      }
    }
  }

  // If the scan above stopped early (a number map was found or the
  // polymorphism limit was hit), clear the maps so the loop below emits no
  // per-map branches and the access falls through to the generic case.
  if (i < maps->length()) {
    count = -1;
    maps->Clear();
  } else {
    count = 0;
  }
  HControlInstruction* smi_check = NULL;
  handled_string = false;

  // Second pass: emit one compare/branch per map that can be handled.
  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(this, access_type, maps->at(i), name);
    if (info.IsStringType()) {
      if (handled_string) continue;
      handled_string = true;
    }
    if (!info.CanAccessMonomorphic()) continue;

    if (count == 0) {
      // Lazily create the join block (and the smi/number plumbing) when the
      // first handled map is encountered.
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        smi_check = New<HIsSmiAndBranch>(
            object, empty_smi_block, not_smi_block);
        FinishCurrentBlock(smi_check);
        GotoNoSimulate(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(object);
      }
    }
    ++count;
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    HValue* dependency;
    if (info.IsNumberType()) {
      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
      compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
      dependency = smi_check;
    } else if (info.IsStringType()) {
      compare = New<HIsStringAndBranch>(object, if_true, if_false);
      dependency = compare;
    } else {
      compare = New<HCompareMap>(object, info.map(), if_true, if_false);
      dependency = compare;
    }
    FinishCurrentBlock(compare);

    if (info.IsNumberType()) {
      // Route the heap-number case through number_block so it merges with
      // the smi path.
      GotoNoSimulate(if_true, number_block);
      if_true = number_block;
    }

    set_current_block(if_true);

    HValue* access =
        BuildMonomorphicAccess(&info, object, dependency, value, ast_id,
                               return_id, FLAG_polymorphic_inlining);

    // Loads produce the accessed value; stores produce the stored value.
    HValue* result = NULL;
    switch (access_type) {
      case LOAD:
        result = access;
        break;
      case STORE:
        result = value;
        break;
    }

    if (access == NULL) {
      // NULL means the access was handled by accessor inlining (or we
      // overflowed the stack, in which case compilation bails out).
      if (HasStackOverflow()) return;
    } else {
      if (access->IsInstruction()) {
        HInstruction* instr = HInstruction::cast(access);
        if (!instr->IsLinked()) AddInstruction(instr);
      }
      if (!ast_context()->IsEffect()) Push(result);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up. Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen. Otherwise
  // use a generic IC.
  if (count == maps->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization(
        Deoptimizer::kUnknownMapInPolymorphicAccess);
  } else {
    HInstruction* instr =
        BuildNamedGeneric(access_type, expr, slot, object, name, value);
    AddInstruction(instr);
    if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);

    if (join != NULL) {
      Goto(join);
    } else {
      // No per-map branches were emitted; the generic access is the whole
      // result.
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
      return;
    }
  }

  DCHECK(join != NULL);
  if (join->HasPredecessor()) {
    join->SetJoinId(ast_id);
    set_current_block(join);
    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
  } else {
    set_current_block(NULL);
  }
}
6746
6747
6748static bool ComputeReceiverTypes(Expression* expr,
6749 HValue* receiver,
6750 SmallMapList** t,
6751 Zone* zone) {
6752 SmallMapList* maps = expr->GetReceiverTypes();
6753 *t = maps;
6754 bool monomorphic = expr->IsMonomorphic();
6755 if (maps != NULL && receiver->HasMonomorphicJSObjectType()) {
6756 Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
6757 maps->FilterForPossibleTransitions(root_map);
6758 monomorphic = maps->length() == 1;
6759 }
6760 return monomorphic && CanInlinePropertyAccess(maps->first());
6761}
6762
6763
6764static bool AreStringTypes(SmallMapList* maps) {
6765 for (int i = 0; i < maps->length(); i++) {
6766 if (maps->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
6767 }
6768 return true;
6769}
6770
6771
// Emits the store for a property assignment whose object (and key, for keyed
// stores) and value have already been pushed onto the environment stack.
// Leaves the stored value as the expression result.
void HOptimizedGraphBuilder::BuildStore(Expression* expr, Property* prop,
                                        FeedbackVectorSlot slot,
                                        BailoutId ast_id, BailoutId return_id,
                                        bool is_uninitialized) {
  if (!prop->key()->IsPropertyName()) {
    // Keyed store.
    HValue* value = Pop();
    HValue* key = Pop();
    HValue* object = Pop();
    bool has_side_effects = false;
    HValue* result =
        HandleKeyedElementAccess(object, key, value, expr, slot, ast_id,
                                 return_id, STORE, &has_side_effects);
    if (has_side_effects) {
      // Keep |value| live across the simulate so it survives a deopt.
      if (!ast_context()->IsEffect()) Push(value);
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) Drop(1);
    }
    // A NULL result means no value is produced here; just return.
    if (result == NULL) return;
    return ast_context()->ReturnValue(value);
  }

  // Named store.
  HValue* value = Pop();
  HValue* object = Pop();

  Literal* key = prop->key()->AsLiteral();
  Handle<String> name = Handle<String>::cast(key->value());
  DCHECK(!name.is_null());

  HValue* access = BuildNamedAccess(STORE, ast_id, return_id, expr, slot,
                                    object, name, value, is_uninitialized);
  if (access == NULL) return;

  if (!ast_context()->IsEffect()) Push(value);
  if (access->IsInstruction()) AddInstruction(HInstruction::cast(access));
  if (access->HasObservableSideEffects()) {
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
  if (!ast_context()->IsEffect()) Drop(1);
  return ast_context()->ReturnValue(value);
}
6814
6815
// Evaluates the target object (plus the key, for keyed stores) and the
// assigned value onto the environment stack, then delegates to BuildStore.
void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  CHECK_ALIVE(VisitForValue(prop->obj()));
  if (!prop->key()->IsPropertyName()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
  }
  CHECK_ALIVE(VisitForValue(expr->value()));
  BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
             expr->AssignmentId(), expr->IsUninitialized());
}
6827
6828
6829// Because not every expression has a position and there is not common
6830// superclass of Assignment and CountOperation, we cannot just pass the
6831// owning expression instead of position and ast_id separately.
// Emits the store for an assignment to a global/unallocated variable:
// first tries a script-context slot, then a global property cell, and
// finally falls back to a generic named store on the global object.
void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
    Variable* var, HValue* value, FeedbackVectorSlot slot, BailoutId ast_id) {
  Handle<JSGlobalObject> global(current_info()->global_object());

  // Lookup in script contexts.
  {
    Handle<ScriptContextTable> script_contexts(
        global->native_context()->script_context_table());
    ScriptContextTable::LookupResult lookup;
    if (ScriptContextTable::Lookup(script_contexts, var->name(), &lookup)) {
      if (lookup.mode == CONST) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
      Handle<Context> script_context =
          ScriptContextTable::GetContext(script_contexts, lookup.context_index);

      Handle<Object> current_value =
          FixedArray::get(script_context, lookup.slot_index);

      // If the values is not the hole, it will stay initialized,
      // so no need to generate a check.
      if (*current_value == *isolate()->factory()->the_hole_value()) {
        return Bailout(kReferenceToUninitializedVariable);
      }

      HStoreNamedField* instr = Add<HStoreNamedField>(
          Add<HConstant>(script_context),
          HObjectAccess::ForContextSlot(lookup.slot_index), value);
      USE(instr);
      DCHECK(instr->HasObservableSideEffects());
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      return;
    }
  }

  LookupIterator it(global, var->name(), LookupIterator::OWN);
  GlobalPropertyAccess type = LookupGlobalProperty(var, &it, STORE);
  if (type == kUseCell) {
    // Store through the global's property cell; register a dependency on it
    // so changes to the cell invalidate this code.
    Handle<PropertyCell> cell = it.GetPropertyCell();
    top_info()->dependencies()->AssumePropertyCell(cell);
    auto cell_type = it.property_details().cell_type();
    if (cell_type == PropertyCellType::kConstant ||
        cell_type == PropertyCellType::kUndefined) {
      // The cell is treated as holding a constant: deoptimize whenever the
      // stored value differs from it.
      Handle<Object> constant(cell->value(), isolate());
      if (value->IsConstant()) {
        HConstant* c_value = HConstant::cast(value);
        if (!constant.is_identical_to(c_value->handle(isolate()))) {
          Add<HDeoptimize>(Deoptimizer::kConstantGlobalVariableAssignment,
                           Deoptimizer::EAGER);
        }
      } else {
        HValue* c_constant = Add<HConstant>(constant);
        IfBuilder builder(this);
        if (constant->IsNumber()) {
          builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
        } else {
          builder.If<HCompareObjectEqAndBranch>(value, c_constant);
        }
        builder.Then();
        builder.Else();
        Add<HDeoptimize>(Deoptimizer::kConstantGlobalVariableAssignment,
                         Deoptimizer::EAGER);
        builder.End();
      }
    }
    HConstant* cell_constant = Add<HConstant>(cell);
    auto access = HObjectAccess::ForPropertyCellValue();
    if (cell_type == PropertyCellType::kConstantType) {
      // Narrow the store representation to match the cell's recorded
      // constant type.
      switch (cell->GetConstantType()) {
        case PropertyCellConstantType::kSmi:
          access = access.WithRepresentation(Representation::Smi());
          break;
        case PropertyCellConstantType::kStableMap: {
          // The map may no longer be stable, deopt if it's ever different from
          // what is currently there, which will allow for restablization.
          Handle<Map> map(HeapObject::cast(cell->value())->map());
          Add<HCheckHeapObject>(value);
          value = Add<HCheckMaps>(value, map);
          access = access.WithRepresentation(Representation::HeapObject());
          break;
        }
      }
    }
    HInstruction* instr = Add<HStoreNamedField>(cell_constant, access, value);
    instr->ClearChangesFlag(kInobjectFields);
    instr->SetChangesFlag(kGlobalVars);
    if (instr->HasObservableSideEffects()) {
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    }
  } else {
    // Fall back to a generic named store on the global object.
    HValue* global_object = Add<HLoadNamedField>(
        BuildGetNativeContext(), nullptr,
        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
    HStoreNamedGeneric* instr =
        Add<HStoreNamedGeneric>(global_object, var->name(), value,
                                function_language_mode(), PREMONOMORPHIC);
    Handle<TypeFeedbackVector> vector =
        handle(current_feedback_vector(), isolate());
    instr->SetVectorAndSlot(vector, slot);
    USE(instr);
    DCHECK(instr->HasObservableSideEffects());
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
6936
6937
// Handles compound assignments (e.g. x += y): evaluates the binary operation
// over the current value, then stores the result back into the variable or
// property target. The result of the expression is the computed value.
void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
  Expression* target = expr->target();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  DCHECK(proxy == NULL || prop == NULL);

  // We have a second position recorded in the FullCodeGenerator to have
  // type feedback for the binary operation.
  BinaryOperation* operation = expr->binary_operation();

  if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->mode() == LET) {
      return Bailout(kUnsupportedLetCompoundAssignment);
    }

    CHECK_ALIVE(VisitForValue(operation));

    switch (var->location()) {
      case VariableLocation::GLOBAL:
      case VariableLocation::UNALLOCATED:
        HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
                                       expr->AssignmentId());
        break;

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL:
        if (var->mode() == CONST_LEGACY) {
          return Bailout(kUnsupportedConstCompoundAssignment);
        }
        if (var->mode() == CONST) {
          return Bailout(kNonInitializerAssignmentToConst);
        }
        BindIfLive(var, Top());
        break;

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will be allocated to context slots. We have no
          // direct way to detect that the variable is a parameter so we do
          // a linear search of the parameter variables.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
            }
          }
        }

        HStoreContextSlot::Mode mode;

        switch (var->mode()) {
          case LET:
            mode = HStoreContextSlot::kCheckDeoptimize;
            break;
          case CONST:
            return Bailout(kNonInitializerAssignmentToConst);
          case CONST_LEGACY:
            // Legacy const: the assignment is dropped but the expression
            // still yields the computed value.
            return ast_context()->ReturnValue(Pop());
          default:
            mode = HStoreContextSlot::kNoCheck;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case VariableLocation::LOOKUP:
        return Bailout(kCompoundAssignmentToLookupSlot);
    }
    return ast_context()->ReturnValue(Pop());

  } else if (prop != NULL) {
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* object = Top();
    HValue* key = NULL;
    if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
      key = Top();
    }

    // Load the current property value, combine it with the right-hand side,
    // then store the result back into the property.
    CHECK_ALIVE(PushLoad(prop, object, key));

    CHECK_ALIVE(VisitForValue(expr->value()));
    HValue* right = Pop();
    HValue* left = Pop();

    Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));

    BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
               expr->AssignmentId(), expr->IsUninitialized());
  } else {
    return Bailout(kInvalidLhsInCompoundAssignment);
  }
}
7041
7042
// Translates an Assignment expression. Compound and property assignments are
// delegated; the remainder handles plain assignments to variables for each
// possible variable location.
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  DCHECK(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    if (var->mode() == CONST) {
      if (expr->op() != Token::INIT) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
    } else if (var->mode() == CONST_LEGACY) {
      // Non-initializing assignments to legacy const are dropped, but the
      // expression still evaluates to the right-hand side.
      if (expr->op() != Token::INIT) {
        CHECK_ALIVE(VisitForValue(expr->value()));
        return ast_context()->ReturnValue(Pop());
      }

      if (var->IsStackAllocated()) {
        // We insert a use of the old value to detect unsupported uses of const
        // variables (e.g. initialization inside a loop).
        HValue* old_value = environment()->Lookup(var);
        Add<HUseConst>(old_value);
      }
    }

    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);

    // Handle the assignment.
    switch (var->location()) {
      case VariableLocation::GLOBAL:
      case VariableLocation::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        // Select a store mode depending on whether this is an initializing
        // store and on the variable's declaration mode.
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              // This case is checked statically so no need to
              // perform checks here
              UNREACHABLE();
            case CONST_LEGACY:
              return ast_context()->ReturnValue(Pop());
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else {
          DCHECK_EQ(Token::INIT, expr->op());
          if (var->mode() == CONST_LEGACY) {
            mode = HStoreContextSlot::kCheckIgnoreAssignment;
          } else {
            mode = HStoreContextSlot::kNoCheck;
          }
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case VariableLocation::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}
7166
7167
// Yield only occurs in generator functions, which this optimizing compiler
// never compiles, so this visitor must never be reached.
void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here.
  UNREACHABLE();
}
7172
7173
// Translates a throw expression: evaluates the exception value, calls the
// Runtime::kThrow runtime function, and — when not inside an inlined
// function — ends the current block with an abnormal exit.
void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!ast_context()->IsEffect()) {
    // The parser turns invalid left-hand sides in assignments into throw
    // statements, which may not be in effect contexts. We might still try
    // to optimize such functions; bail out now if we do.
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
  CHECK_ALIVE(VisitForValue(expr->exception()));

  HValue* value = environment()->Pop();
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  Add<HPushArguments>(value);
  Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kThrow), 1);
  Add<HSimulate>(expr->id());

  // If the throw definitely exits the function, we can finish with a dummy
  // control flow at this point. This is not the case if the throw is inside
  // an inlined function which may be replaced.
  if (call_context() == NULL) {
    FinishExitCurrentBlock(New<HAbnormalExit>());
  }
}
7199
7200
7201HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
7202 if (string->IsConstant()) {
7203 HConstant* c_string = HConstant::cast(string);
7204 if (c_string->HasStringValue()) {
7205 return Add<HConstant>(c_string->StringValue()->map()->instance_type());
7206 }
7207 }
7208 return Add<HLoadNamedField>(
7209 Add<HLoadNamedField>(string, nullptr, HObjectAccess::ForMap()), nullptr,
7210 HObjectAccess::ForMapInstanceType());
7211}
7212
7213
// Builds a string-length load (see BuildLoadStringLength) and adds it to the
// current basic block.
HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
  return AddInstruction(BuildLoadStringLength(string));
}
7217
7218
7219HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* string) {
7220 if (string->IsConstant()) {
7221 HConstant* c_string = HConstant::cast(string);
7222 if (c_string->HasStringValue()) {
7223 return New<HConstant>(c_string->StringValue()->length());
7224 }
7225 }
7226 return New<HLoadNamedField>(string, nullptr,
7227 HObjectAccess::ForStringLength());
7228}
7229
7230
// Builds a generic (IC-based) named load or store, emitting a soft deopt
// first when there is no type feedback yet. Accesses whose feedback slot
// belongs to a keyed IC are emitted as keyed generic accesses so the
// vector/slot information stays consistent with full codegen.
HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
    PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
    HValue* object, Handle<Name> name, HValue* value, bool is_uninitialized) {
  if (is_uninitialized) {
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForGenericNamedAccess,
        Deoptimizer::SOFT);
  }
  if (access_type == LOAD) {
    Handle<TypeFeedbackVector> vector =
        handle(current_feedback_vector(), isolate());

    if (!expr->AsProperty()->key()->IsPropertyName()) {
      // It's possible that a keyed load of a constant string was converted
      // to a named load. Here, at the last minute, we need to make sure to
      // use a generic Keyed Load if we are using the type vector, because
      // it has to share information with full code.
      HConstant* key = Add<HConstant>(name);
      HLoadKeyedGeneric* result = New<HLoadKeyedGeneric>(
          object, key, function_language_mode(), PREMONOMORPHIC);
      result->SetVectorAndSlot(vector, slot);
      return result;
    }

    HLoadNamedGeneric* result = New<HLoadNamedGeneric>(
        object, name, function_language_mode(), PREMONOMORPHIC);
    result->SetVectorAndSlot(vector, slot);
    return result;
  } else {
    if (current_feedback_vector()->GetKind(slot) ==
        FeedbackVectorSlotKind::KEYED_STORE_IC) {
      // It's possible that a keyed store of a constant string was converted
      // to a named store. Here, at the last minute, we need to make sure to
      // use a generic Keyed Store if we are using the type vector, because
      // it has to share information with full code.
      HConstant* key = Add<HConstant>(name);
      HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
          object, key, value, function_language_mode(), PREMONOMORPHIC);
      Handle<TypeFeedbackVector> vector =
          handle(current_feedback_vector(), isolate());
      result->SetVectorAndSlot(vector, slot);
      return result;
    }

    HStoreNamedGeneric* result = New<HStoreNamedGeneric>(
        object, name, value, function_language_mode(), PREMONOMORPHIC);
    Handle<TypeFeedbackVector> vector =
        handle(current_feedback_vector(), isolate());
    result->SetVectorAndSlot(vector, slot);
    return result;
  }
}
7283
7284
7285HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
7286 PropertyAccessType access_type, Expression* expr, FeedbackVectorSlot slot,
7287 HValue* object, HValue* key, HValue* value) {
7288 if (access_type == LOAD) {
7289 InlineCacheState initial_state = expr->AsProperty()->GetInlineCacheState();
7290 HLoadKeyedGeneric* result = New<HLoadKeyedGeneric>(
7291 object, key, function_language_mode(), initial_state);
7292 // HLoadKeyedGeneric with vector ics benefits from being encoded as
7293 // MEGAMORPHIC because the vector/slot combo becomes unnecessary.
7294 if (initial_state != MEGAMORPHIC) {
7295 // We need to pass vector information.
7296 Handle<TypeFeedbackVector> vector =
7297 handle(current_feedback_vector(), isolate());
7298 result->SetVectorAndSlot(vector, slot);
7299 }
7300 return result;
7301 } else {
7302 HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
7303 object, key, value, function_language_mode(), PREMONOMORPHIC);
7304 Handle<TypeFeedbackVector> vector =
7305 handle(current_feedback_vector(), isolate());
7306 result->SetVectorAndSlot(vector, slot);
7307 return result;
7308 }
7309}
7310
7311
7312LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
7313 // Loads from a "stock" fast holey double arrays can elide the hole check.
7314 // Loads from a "stock" fast holey array can convert the hole to undefined
7315 // with impunity.
7316 LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
7317 bool holey_double_elements =
7318 *map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS);
7319 bool holey_elements =
7320 *map == isolate()->get_initial_js_array_map(FAST_HOLEY_ELEMENTS);
7321 if ((holey_double_elements || holey_elements) &&
7322 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
7323 load_mode =
7324 holey_double_elements ? ALLOW_RETURN_HOLE : CONVERT_HOLE_TO_UNDEFINED;
7325
7326 Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
7327 Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
7328 BuildCheckPrototypeMaps(prototype, object_prototype);
7329 graph()->MarkDependsOnEmptyArrayProtoElements();
7330 }
7331 return load_mode;
7332}
7333
7334
// Builds an element access (load or store) for a receiver known to have a
// single map. A map check is emitted on |object| (guarded by |dependency|),
// and stores into objects with JSObject prototypes additionally check the
// whole prototype chain before delegating to the unchecked access builder.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HValue* dependency,
    Handle<Map> map,
    PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);

  if (access_type == STORE && map->prototype()->IsJSObject()) {
    // monomorphic stores need a prototype chain check because shape
    // changes could allow callbacks on elements in the chain that
    // aren't compatible with monomorphic keyed stores.
    PrototypeIterator iter(map);
    JSObject* holder = NULL;
    while (!iter.IsAtEnd()) {
      // JSProxies can't occur here because we wouldn't have installed a
      // non-generic IC if there were any.
      holder = *PrototypeIterator::GetCurrent<JSObject>(iter);
      iter.Advance();
    }
    // After the walk, |holder| is the last object on the prototype chain.
    DCHECK(holder && holder->IsJSObject());

    BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
                            Handle<JSObject>(holder));
  }

  // Holey "stock" array maps may allow eliding or converting hole checks.
  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
  return BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), access_type,
      load_mode, store_mode);
}
7370
7371
7372static bool CanInlineElementAccess(Handle<Map> map) {
7373 return map->IsJSObjectMap() && !map->has_dictionary_elements() &&
7374 !map->has_sloppy_arguments_elements() &&
7375 !map->has_indexed_interceptor() && !map->is_access_check_needed();
7376}
7377
7378
// Attempts to build one "consolidated" load serving all |maps| at once,
// instead of dispatching per map. This only succeeds when all maps agree
// on JSArray-ness and share a compatible elements-kind family (all double
// or all smi/object); the load is then emitted for the most general kind
// seen. Returns NULL when consolidation is not possible.
HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition. This is
  // much faster than transitioning the elements to the worst case, trading a
  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    if (!CanInlineElementAccess(map)) return NULL;
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      return NULL;
    } else {
      has_non_js_array_access = true;
    }
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    } else {
      return NULL;
    }
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    }
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
                        most_general_consolidated_map->elements_kind(),
                        map->elements_kind())) {
      most_general_consolidated_map = map;
    }
  }
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;

  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
  if (has_seen_holey_elements) {
    // Make sure that all of the maps we are handling have the initial array
    // prototype.
    bool saw_non_array_prototype = false;
    for (int i = 0; i < maps->length(); ++i) {
      Handle<Map> map = maps->at(i);
      if (map->prototype() != *isolate()->initial_array_prototype()) {
        // We can't guarantee that loading the hole is safe. The prototype may
        // have an element at this position.
        saw_non_array_prototype = true;
        break;
      }
    }

    if (!saw_non_array_prototype) {
      Handle<Map> holey_map = handle(
          isolate()->get_initial_js_array_map(consolidated_elements_kind));
      load_mode = BuildKeyedHoleMode(holey_map);
      if (load_mode != NEVER_RETURN_HOLE) {
        for (int i = 0; i < maps->length(); ++i) {
          Handle<Map> map = maps->at(i);
          // The prototype check was already done for the holey map in
          // BuildKeyedHoleMode.
          if (!map.is_identical_to(holey_map)) {
            Handle<JSObject> prototype(JSObject::cast(map->prototype()),
                                       isolate());
            Handle<JSObject> object_prototype =
                isolate()->initial_object_prototype();
            BuildCheckPrototypeMaps(prototype, object_prototype);
          }
        }
      }
    }
  }
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind, LOAD, load_mode, STANDARD_STORE);
  return instr;
}
7477
7478
// Builds a polymorphic keyed element access over |maps|. In order, it
// tries: a consolidated load covering all maps; elements-kind transitions
// that collapse the set to a single map (then handled monomorphically);
// and finally a per-map dispatch chain of HCompareMap blocks joined in a
// common block, deoptimizing if no map matches at runtime.
// *has_side_effects tells the caller whether it must add a simulate.
HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
    Expression* expr, FeedbackVectorSlot slot, HValue* object, HValue* key,
    HValue* val, SmallMapList* maps, PropertyAccessType access_type,
    KeyedAccessStoreMode store_mode, bool* has_side_effects) {
  *has_side_effects = false;
  BuildCheckHeapObject(object);

  if (access_type == LOAD) {
    HInstruction* consolidated_load =
        TryBuildConsolidatedElementLoad(object, key, val, maps);
    if (consolidated_load != NULL) {
      *has_side_effects |= consolidated_load->HasObservableSideEffects();
      return consolidated_load;
    }
  }

  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    // Loads from strings or loads with a mix of string and non-string maps
    // shouldn't be handled polymorphically.
    DCHECK(access_type != LOAD || !map->IsStringMap());
    ElementsKind elements_kind = map->elements_kind();
    if (CanInlineElementAccess(map) && IsFastElementsKind(elements_kind) &&
        elements_kind != GetInitialFastElementsKind()) {
      possible_transitioned_maps.Add(map);
    }
    if (IsSloppyArgumentsElements(elements_kind)) {
      // Sloppy-arguments elements force a fully generic access.
      HInstruction* result =
          BuildKeyedGeneric(access_type, expr, slot, object, key, val);
      *has_side_effects = result->HasObservableSideEffects();
      return AddInstruction(result);
    }
  }
  // Get transition target for each map (NULL == no transition).
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    Handle<Map> transitioned_map =
        Map::FindTransitionedMap(map, &possible_transitioned_maps);
    transition_target.Add(transitioned_map);
  }

  MapHandleList untransitionable_maps(maps->length());
  HTransitionElementsKind* transition = NULL;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    DCHECK(map->IsMap());
    if (!transition_target.at(i).is_null()) {
      DCHECK(Map::IsValidElementsTransition(
          map->elements_kind(),
          transition_target.at(i)->elements_kind()));
      transition = Add<HTransitionElementsKind>(object, map,
                                                transition_target.at(i));
    } else {
      untransitionable_maps.Add(map);
    }
  }

  // If only one map is left after transitioning, handle this case
  // monomorphically.
  DCHECK(untransitionable_maps.length() >= 1);
  if (untransitionable_maps.length() == 1) {
    Handle<Map> untransitionable_map = untransitionable_maps[0];
    HInstruction* instr = NULL;
    if (!CanInlineElementAccess(untransitionable_map)) {
      instr = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      instr = BuildMonomorphicElementAccess(
          object, key, val, transition, untransitionable_map, access_type,
          store_mode);
    }
    *has_side_effects |= instr->HasObservableSideEffects();
    return access_type == STORE ? val : instr;
  }

  // Multiple maps remain: build a dispatch chain on the receiver map.
  HBasicBlock* join = graph()->CreateBasicBlock();

  for (int i = 0; i < untransitionable_maps.length(); ++i) {
    Handle<Map> map = untransitionable_maps[i];
    ElementsKind elements_kind = map->elements_kind();
    HBasicBlock* this_map = graph()->CreateBasicBlock();
    HBasicBlock* other_map = graph()->CreateBasicBlock();
    HCompareMap* mapcompare =
        New<HCompareMap>(object, map, this_map, other_map);
    FinishCurrentBlock(mapcompare);

    set_current_block(this_map);
    HInstruction* access = NULL;
    if (!CanInlineElementAccess(map)) {
      access = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
    } else {
      DCHECK(IsFastElementsKind(elements_kind) ||
             IsFixedTypedArrayElementsKind(elements_kind));
      LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
      // Happily, mapcompare is a checked object.
      access = BuildUncheckedMonomorphicElementAccess(
          mapcompare, key, val,
          map->instance_type() == JS_ARRAY_TYPE,
          elements_kind, access_type,
          load_mode,
          store_mode);
    }
    *has_side_effects |= access->HasObservableSideEffects();
    // The caller will use has_side_effects and add a correct Simulate.
    access->SetFlag(HValue::kHasNoObservableSideEffects);
    if (access_type == LOAD) {
      Push(access);
    }
    NoObservableSideEffectsScope scope(this);
    GotoNoSimulate(join);
    set_current_block(other_map);
  }

  // Ensure that we visited at least one map above that goes to join. This is
  // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
  // rather than joining the join block. If this becomes an issue, insert a
  // generic access in the case length() == 0.
  DCHECK(join->predecessors()->length() > 0);
  // Deopt if none of the cases matched.
  NoObservableSideEffectsScope scope(this);
  FinishExitWithHardDeoptimization(
      Deoptimizer::kUnknownMapInPolymorphicElementAccess);
  set_current_block(join);
  return access_type == STORE ? val : Pop();
}
7609
7610
// Builds a keyed access obj[key]. When feedback shows the key is a known
// name (or the key is a constant name/symbol), the access is rerouted to
// the named-access machinery, guarded by a value check on the key where
// needed. Otherwise dispatch is driven by the receiver maps: monomorphic,
// polymorphic, or fully generic, with soft deopts recorded when type
// feedback is missing.
HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
    HValue* obj, HValue* key, HValue* val, Expression* expr,
    FeedbackVectorSlot slot, BailoutId ast_id, BailoutId return_id,
    PropertyAccessType access_type, bool* has_side_effects) {
  // A keyed name access with type feedback may contain the name.
  Handle<TypeFeedbackVector> vector =
      handle(current_feedback_vector(), isolate());
  HValue* expected_key = key;
  if (!key->ActualValue()->IsConstant()) {
    Name* name = nullptr;
    if (access_type == LOAD) {
      KeyedLoadICNexus nexus(vector, slot);
      name = nexus.FindFirstName();
    } else {
      KeyedStoreICNexus nexus(vector, slot);
      name = nexus.FindFirstName();
    }
    if (name != nullptr) {
      Handle<Name> handle_name(name);
      expected_key = Add<HConstant>(handle_name);
      // We need a check against the key.
      bool in_new_space = isolate()->heap()->InNewSpace(*handle_name);
      Unique<Name> unique_name = Unique<Name>::CreateUninitialized(handle_name);
      Add<HCheckValue>(key, unique_name, in_new_space);
    }
  }
  if (expected_key->ActualValue()->IsConstant()) {
    Handle<Object> constant =
        HConstant::cast(expected_key->ActualValue())->handle(isolate());
    uint32_t array_index;
    // Non-index string keys and symbols take the named-access path.
    if ((constant->IsString() &&
         !Handle<String>::cast(constant)->AsArrayIndex(&array_index)) ||
        constant->IsSymbol()) {
      if (!constant->IsUniqueName()) {
        constant = isolate()->factory()->InternalizeString(
            Handle<String>::cast(constant));
      }
      HValue* access =
          BuildNamedAccess(access_type, ast_id, return_id, expr, slot, obj,
                           Handle<Name>::cast(constant), val, false);
      if (access == NULL || access->IsPhi() ||
          HInstruction::cast(access)->IsLinked()) {
        *has_side_effects = false;
      } else {
        HInstruction* instr = HInstruction::cast(access);
        AddInstruction(instr);
        *has_side_effects = instr->HasObservableSideEffects();
      }
      return access;
    }
  }

  DCHECK(!expr->IsPropertyName());
  HInstruction* instr = NULL;

  SmallMapList* maps;
  bool monomorphic = ComputeReceiverTypes(expr, obj, &maps, zone());

  bool force_generic = false;
  if (expr->GetKeyType() == PROPERTY) {
    // Non-Generic accesses assume that elements are being accessed, and will
    // deopt for non-index keys, which the IC knows will occur.
    // TODO(jkummerow): Consider adding proper support for property accesses.
    force_generic = true;
    monomorphic = false;
  } else if (access_type == STORE &&
             (monomorphic || (maps != NULL && !maps->is_empty()))) {
    // Stores can't be mono/polymorphic if their prototype chain has dictionary
    // elements. However a receiver map that has dictionary elements itself
    // should be left to normal mono/poly behavior (the other maps may benefit
    // from highly optimized stores).
    for (int i = 0; i < maps->length(); i++) {
      Handle<Map> current_map = maps->at(i);
      if (current_map->DictionaryElementsInPrototypeChainOnly()) {
        force_generic = true;
        monomorphic = false;
        break;
      }
    }
  } else if (access_type == LOAD && !monomorphic &&
             (maps != NULL && !maps->is_empty())) {
    // Polymorphic loads have to go generic if any of the maps are strings.
    // If some, but not all of the maps are strings, we should go generic
    // because polymorphic access wants to key on ElementsKind and isn't
    // compatible with strings.
    for (int i = 0; i < maps->length(); i++) {
      Handle<Map> current_map = maps->at(i);
      if (current_map->IsStringMap()) {
        force_generic = true;
        break;
      }
    }
  }

  if (monomorphic) {
    Handle<Map> map = maps->first();
    if (!CanInlineElementAccess(map)) {
      instr = AddInstruction(
          BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
    } else {
      BuildCheckHeapObject(obj);
      instr = BuildMonomorphicElementAccess(
          obj, key, val, NULL, map, access_type, expr->GetStoreMode());
    }
  } else if (!force_generic && (maps != NULL && !maps->is_empty())) {
    return HandlePolymorphicElementAccess(expr, slot, obj, key, val, maps,
                                          access_type, expr->GetStoreMode(),
                                          has_side_effects);
  } else {
    if (access_type == STORE) {
      if (expr->IsAssignment() &&
          expr->AsAssignment()->HasNoTypeInformation()) {
        Add<HDeoptimize>(Deoptimizer::kInsufficientTypeFeedbackForKeyedStore,
                         Deoptimizer::SOFT);
      }
    } else {
      if (expr->AsProperty()->HasNoTypeInformation()) {
        Add<HDeoptimize>(Deoptimizer::kInsufficientTypeFeedbackForKeyedLoad,
                         Deoptimizer::SOFT);
      }
    }
    instr = AddInstruction(
        BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
  }
  *has_side_effects = instr->HasObservableSideEffects();
  return instr;
}
7738
7739
7740void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
7741 // Outermost function already has arguments on the stack.
7742 if (function_state()->outer() == NULL) return;
7743
7744 if (function_state()->arguments_pushed()) return;
7745
7746 // Push arguments when entering inlined function.
7747 HEnterInlined* entry = function_state()->entry();
7748 entry->set_arguments_pushed();
7749
7750 HArgumentsObject* arguments = entry->arguments_object();
7751 const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
7752
7753 HInstruction* insert_after = entry;
7754 for (int i = 0; i < arguments_values->length(); i++) {
7755 HValue* argument = arguments_values->at(i);
7756 HInstruction* push_argument = New<HPushArguments>(argument);
7757 push_argument->InsertAfter(insert_after);
7758 insert_after = push_argument;
7759 }
7760
7761 HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
7762 arguments_elements->ClearFlag(HValue::kUseGVN);
7763 arguments_elements->InsertAfter(insert_after);
7764 function_state()->set_arguments_elements(arguments_elements);
7765}
7766
7767
// Tries to compile |expr| as an access on the arguments object, i.e.
// arguments.length or arguments[i]. Returns false when the receiver is not
// a stack-allocated variable flagged as arguments (or a named access other
// than "length"); otherwise emits the access, reports its result through
// the ast context, and returns true. Inlined frames take their argument
// count and elements from function_state() rather than a physical frame.
bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
  VariableProxy* proxy = expr->obj()->AsVariableProxy();
  if (proxy == NULL) return false;
  if (!proxy->var()->IsStackAllocated()) return false;
  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
    return false;
  }

  HInstruction* result = NULL;
  if (expr->key()->IsPropertyName()) {
    // Only the "length" named property is supported.
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    if (!String::Equals(name, isolate()->factory()->length_string())) {
      return false;
    }

    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      result = New<HArgumentsLength>(elements);
    } else {
      // Number of arguments without receiver.
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      result = New<HConstant>(argument_count);
    }
  } else {
    // Keyed access: evaluate the key, then read the bounds-checked element.
    Push(graph()->GetArgumentsObject());
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
    HValue* key = Pop();
    Drop(1);  // Arguments object.
    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      HInstruction* length = Add<HArgumentsLength>(elements);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    } else {
      EnsureArgumentsArePushedForAccess();

      // Number of arguments without receiver.
      HInstruction* elements = function_state()->arguments_elements();
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      HInstruction* length = Add<HConstant>(argument_count);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    }
  }
  ast_context()->ReturnInstruction(result, expr->id());
  return true;
}
7817
7818
// Builds a named property access (load or store) on |object|. With map
// feedback that can be handled monomorphically, the access is emitted
// behind a receiver check (an instance-type check for string receivers, a
// map check otherwise). Polymorphic cases are delegated to
// HandlePolymorphicNamedFieldAccess, which reports its result through the
// ast context — NULL is returned here to signal that. Without usable
// feedback a generic access is built.
HValue* HOptimizedGraphBuilder::BuildNamedAccess(
    PropertyAccessType access, BailoutId ast_id, BailoutId return_id,
    Expression* expr, FeedbackVectorSlot slot, HValue* object,
    Handle<Name> name, HValue* value, bool is_uninitialized) {
  SmallMapList* maps;
  ComputeReceiverTypes(expr, object, &maps, zone());
  DCHECK(maps != NULL);

  if (maps->length() > 0) {
    PropertyAccessInfo info(this, access, maps->first(), name);
    if (!info.CanAccessAsMonomorphic(maps)) {
      HandlePolymorphicNamedFieldAccess(access, expr, slot, ast_id, return_id,
                                        object, value, maps, name);
      return NULL;
    }

    HValue* checked_object;
    // Type::Number() is only supported by polymorphic load/call handling.
    DCHECK(!info.IsNumberType());
    BuildCheckHeapObject(object);
    if (AreStringTypes(maps)) {
      checked_object =
          Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
    } else {
      checked_object = Add<HCheckMaps>(object, maps);
    }
    return BuildMonomorphicAccess(
        &info, object, checked_object, value, ast_id, return_id);
  }

  return BuildNamedGeneric(access, expr, slot, object, name, value,
                           is_uninitialized);
}
7852
7853
7854void HOptimizedGraphBuilder::PushLoad(Property* expr,
7855 HValue* object,
7856 HValue* key) {
7857 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
7858 Push(object);
7859 if (key != NULL) Push(key);
7860 BuildLoad(expr, expr->LoadId());
7861}
7862
7863
// Emits the load for a property expression whose operands are already on
// the environment stack: indexed string accesses become a char-code load
// plus HStringCharFromCode, named properties go through BuildNamedAccess,
// and all other keys take the keyed-element path (adding a simulate when
// the access has observable side effects). Results are delivered through
// the ast context.
void HOptimizedGraphBuilder::BuildLoad(Property* expr,
                                       BailoutId ast_id) {
  HInstruction* instr = NULL;
  if (expr->IsStringAccess() && expr->GetKeyType() == ELEMENT) {
    HValue* index = Pop();
    HValue* string = Pop();
    HInstruction* char_code = BuildStringCharCodeAt(string, index);
    AddInstruction(char_code);
    instr = NewUncasted<HStringCharFromCode>(char_code);

  } else if (expr->key()->IsPropertyName()) {
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    HValue* object = Pop();

    HValue* value = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
                                     expr->PropertyFeedbackSlot(), object, name,
                                     NULL, expr->IsUninitialized());
    // NULL means the polymorphic path already reported through the context;
    // a phi or an already-linked instruction is returned as a plain value.
    if (value == NULL) return;
    if (value->IsPhi()) return ast_context()->ReturnValue(value);
    instr = HInstruction::cast(value);
    if (instr->IsLinked()) return ast_context()->ReturnValue(instr);

  } else {
    HValue* key = Pop();
    HValue* obj = Pop();

    bool has_side_effects = false;
    HValue* load = HandleKeyedElementAccess(
        obj, key, NULL, expr, expr->PropertyFeedbackSlot(), ast_id,
        expr->LoadId(), LOAD, &has_side_effects);
    if (has_side_effects) {
      if (ast_context()->IsEffect()) {
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      } else {
        // Keep the loaded value alive across the simulate.
        Push(load);
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
        Drop(1);
      }
    }
    if (load == NULL) return;
    return ast_context()->ReturnValue(load);
  }
  return ast_context()->ReturnInstruction(instr, ast_id);
}
7908
7909
7910void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
7911 DCHECK(!HasStackOverflow());
7912 DCHECK(current_block() != NULL);
7913 DCHECK(current_block()->HasPredecessor());
7914
7915 if (TryArgumentsAccess(expr)) return;
7916
7917 CHECK_ALIVE(VisitForValue(expr->obj()));
7918 if (!expr->key()->IsPropertyName() || expr->IsStringAccess()) {
7919 CHECK_ALIVE(VisitForValue(expr->key()));
7920 }
7921
7922 BuildLoad(expr, expr->id());
7923}
7924
7925
7926HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant) {
7927 HCheckMaps* check = Add<HCheckMaps>(
7928 Add<HConstant>(constant), handle(constant->map()));
7929 check->ClearDependsOnFlag(kElementsKind);
7930 return check;
7931}
7932
7933
// Emits map checks for every object on the prototype chain starting at
// |prototype| (inclusive) up to and including |holder|, returning the
// holder's check instruction. If |holder| is null — or is never found —
// every object on the chain is checked and NULL is returned once the
// chain is exhausted.
HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
                                                     Handle<JSObject> holder) {
  PrototypeIterator iter(isolate(), prototype,
                         PrototypeIterator::START_AT_RECEIVER);
  while (holder.is_null() ||
         !PrototypeIterator::GetCurrent(iter).is_identical_to(holder)) {
    // Check the current object before advancing, so every object up to the
    // holder (or the end of the chain) gets a map check.
    BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
    iter.Advance();
    if (iter.IsAtEnd()) {
      return NULL;
    }
  }
  return BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter));
}
7948
7949
7950void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
7951 Handle<Map> receiver_map) {
7952 if (!holder.is_null()) {
7953 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
7954 BuildCheckPrototypeMaps(prototype, holder);
7955 }
7956}
7957
7958
7959HInstruction* HOptimizedGraphBuilder::NewPlainFunctionCall(HValue* fun,
7960 int argument_count) {
7961 return New<HCallJSFunction>(fun, argument_count);
7962}
7963
7964
7965HInstruction* HOptimizedGraphBuilder::NewArgumentAdaptorCall(
7966 HValue* fun, HValue* context,
7967 int argument_count, HValue* expected_param_count) {
7968 HValue* new_target = graph()->GetConstantUndefined();
7969 HValue* arity = Add<HConstant>(argument_count - 1);
7970
7971 HValue* op_vals[] = {context, fun, new_target, arity, expected_param_count};
7972
7973 Callable callable = CodeFactory::ArgumentAdaptor(isolate());
7974 HConstant* stub = Add<HConstant>(callable.code());
7975
7976 return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
7977 Vector<HValue*>(op_vals, arraysize(op_vals)));
7978}
7979
7980
7981HInstruction* HOptimizedGraphBuilder::BuildCallConstantFunction(
7982 Handle<JSFunction> jsfun, int argument_count) {
7983 HValue* target = Add<HConstant>(jsfun);
7984 // For constant functions, we try to avoid calling the
7985 // argument adaptor and instead call the function directly
7986 int formal_parameter_count =
7987 jsfun->shared()->internal_formal_parameter_count();
7988 bool dont_adapt_arguments =
7989 (formal_parameter_count ==
7990 SharedFunctionInfo::kDontAdaptArgumentsSentinel);
7991 int arity = argument_count - 1;
7992 bool can_invoke_directly =
7993 dont_adapt_arguments || formal_parameter_count == arity;
7994 if (can_invoke_directly) {
7995 if (jsfun.is_identical_to(current_info()->closure())) {
7996 graph()->MarkRecursive();
7997 }
7998 return NewPlainFunctionCall(target, argument_count);
7999 } else {
8000 HValue* param_count_value = Add<HConstant>(formal_parameter_count);
8001 HValue* context = Add<HLoadNamedField>(
8002 target, nullptr, HObjectAccess::ForFunctionContextPointer());
8003 return NewArgumentAdaptorCall(target, context,
8004 argument_count, param_count_value);
8005 }
8006 UNREACHABLE();
8007 return NULL;
8008}
8009
8010
// Helper record used when ordering polymorphic call targets. It carries
// the candidate's position in the original map list (index), its profiler
// tick count (ticks), and its AST size (size); the ordering itself lives
// in operator<.
class FunctionSorter {
 public:
  explicit FunctionSorter(int index = 0, int ticks = 0, int size = 0)
      : index_(index), ticks_(ticks), size_(size) {}

  int index() const { return index_; }
  int ticks() const { return ticks_; }
  int size() const { return size_; }

 private:
  int index_;  // Position in the original candidate list.
  int ticks_;  // Profiler tick count of the target.
  int size_;   // Inlining AST size of the target.
};
8025
8026
8027inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
8028 int diff = lhs.ticks() - rhs.ticks();
8029 if (diff != 0) return diff > 0;
8030 return lhs.size() < rhs.size();
8031}
8032
8033
8034void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
8035 HValue* receiver,
8036 SmallMapList* maps,
8037 Handle<String> name) {
8038 int argument_count = expr->arguments()->length() + 1; // Includes receiver.
8039 FunctionSorter order[kMaxCallPolymorphism];
8040
8041 bool handle_smi = false;
8042 bool handled_string = false;
8043 int ordered_functions = 0;
8044
8045 int i;
8046 for (i = 0; i < maps->length() && ordered_functions < kMaxCallPolymorphism;
8047 ++i) {
8048 PropertyAccessInfo info(this, LOAD, maps->at(i), name);
8049 if (info.CanAccessMonomorphic() && info.IsDataConstant() &&
8050 info.constant()->IsJSFunction()) {
8051 if (info.IsStringType()) {
8052 if (handled_string) continue;
8053 handled_string = true;
8054 }
8055 Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
8056 if (info.IsNumberType()) {
8057 handle_smi = true;
8058 }
8059 expr->set_target(target);
8060 order[ordered_functions++] = FunctionSorter(
8061 i, target->shared()->profiler_ticks(), InliningAstSize(target));
8062 }
8063 }
8064
8065 std::sort(order, order + ordered_functions);
8066
8067 if (i < maps->length()) {
8068 maps->Clear();
8069 ordered_functions = -1;
8070 }
8071
8072 HBasicBlock* number_block = NULL;
8073 HBasicBlock* join = NULL;
8074 handled_string = false;
8075 int count = 0;
8076
8077 for (int fn = 0; fn < ordered_functions; ++fn) {
8078 int i = order[fn].index();
8079 PropertyAccessInfo info(this, LOAD, maps->at(i), name);
8080 if (info.IsStringType()) {
8081 if (handled_string) continue;
8082 handled_string = true;
8083 }
8084 // Reloads the target.
8085 info.CanAccessMonomorphic();
8086 Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
8087
8088 expr->set_target(target);
8089 if (count == 0) {
8090 // Only needed once.
8091 join = graph()->CreateBasicBlock();
8092 if (handle_smi) {
8093 HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
8094 HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
8095 number_block = graph()->CreateBasicBlock();
8096 FinishCurrentBlock(New<HIsSmiAndBranch>(
8097 receiver, empty_smi_block, not_smi_block));
8098 GotoNoSimulate(empty_smi_block, number_block);
8099 set_current_block(not_smi_block);
8100 } else {
8101 BuildCheckHeapObject(receiver);
8102 }
8103 }
8104 ++count;
8105 HBasicBlock* if_true = graph()->CreateBasicBlock();
8106 HBasicBlock* if_false = graph()->CreateBasicBlock();
8107 HUnaryControlInstruction* compare;
8108
8109 Handle<Map> map = info.map();
8110 if (info.IsNumberType()) {
8111 Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
8112 compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
8113 } else if (info.IsStringType()) {
8114 compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
8115 } else {
8116 compare = New<HCompareMap>(receiver, map, if_true, if_false);
8117 }
8118 FinishCurrentBlock(compare);
8119
8120 if (info.IsNumberType()) {
8121 GotoNoSimulate(if_true, number_block);
8122 if_true = number_block;
8123 }
8124
8125 set_current_block(if_true);
8126
8127 AddCheckPrototypeMaps(info.holder(), map);
8128
8129 HValue* function = Add<HConstant>(expr->target());
8130 environment()->SetExpressionStackAt(0, function);
8131 Push(receiver);
8132 CHECK_ALIVE(VisitExpressions(expr->arguments()));
8133 bool needs_wrapping = info.NeedsWrappingFor(target);
8134 bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
8135 if (FLAG_trace_inlining && try_inline) {
8136 Handle<JSFunction> caller = current_info()->closure();
8137 base::SmartArrayPointer<char> caller_name =
8138 caller->shared()->DebugName()->ToCString();
8139 PrintF("Trying to inline the polymorphic call to %s from %s\n",
8140 name->ToCString().get(),
8141 caller_name.get());
8142 }
8143 if (try_inline && TryInlineCall(expr)) {
8144 // Trying to inline will signal that we should bailout from the
8145 // entire compilation by setting stack overflow on the visitor.
8146 if (HasStackOverflow()) return;
8147 } else {
8148 // Since HWrapReceiver currently cannot actually wrap numbers and strings,
8149 // use the regular CallFunctionStub for method calls to wrap the receiver.
8150 // TODO(verwaest): Support creation of value wrappers directly in
8151 // HWrapReceiver.
8152 HInstruction* call =
8153 needs_wrapping ? NewUncasted<HCallFunction>(
8154 function, argument_count,
8155 ConvertReceiverMode::kNotNullOrUndefined)
8156 : BuildCallConstantFunction(target, argument_count);
8157 PushArgumentsFromEnvironment(argument_count);
8158 AddInstruction(call);
8159 Drop(1); // Drop the function.
8160 if (!ast_context()->IsEffect()) Push(call);
8161 }
8162
8163 if (current_block() != NULL) Goto(join);
8164 set_current_block(if_false);
8165 }
8166
8167 // Finish up. Unconditionally deoptimize if we've handled all the maps we
8168 // know about and do not want to handle ones we've never seen. Otherwise
8169 // use a generic IC.
8170 if (ordered_functions == maps->length() && FLAG_deoptimize_uncommon_cases) {
8171 FinishExitWithHardDeoptimization(Deoptimizer::kUnknownMapInPolymorphicCall);
8172 } else {
8173 Property* prop = expr->expression()->AsProperty();
8174 HInstruction* function =
8175 BuildNamedGeneric(LOAD, prop, prop->PropertyFeedbackSlot(), receiver,
8176 name, NULL, prop->IsUninitialized());
8177 AddInstruction(function);
8178 Push(function);
8179 AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
8180
8181 environment()->SetExpressionStackAt(1, function);
8182 environment()->SetExpressionStackAt(0, receiver);
8183 CHECK_ALIVE(VisitExpressions(expr->arguments()));
8184
8185 HInstruction* call = New<HCallFunction>(
8186 function, argument_count, ConvertReceiverMode::kNotNullOrUndefined);
8187
8188 PushArgumentsFromEnvironment(argument_count);
8189
8190 Drop(1); // Function.
8191
8192 if (join != NULL) {
8193 AddInstruction(call);
8194 if (!ast_context()->IsEffect()) Push(call);
8195 Goto(join);
8196 } else {
8197 return ast_context()->ReturnInstruction(call, expr->id());
8198 }
8199 }
8200
8201 // We assume that control flow is always live after an expression. So
8202 // even without predecessors to the join block, we set it as the exit
8203 // block and continue by adding instructions there.
8204 DCHECK(join != NULL);
8205 if (join->HasPredecessor()) {
8206 set_current_block(join);
8207 join->SetJoinId(expr->id());
8208 if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
8209 } else {
8210 set_current_block(NULL);
8211 }
8212}
8213
8214
8215void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
8216 Handle<JSFunction> caller,
8217 const char* reason) {
8218 if (FLAG_trace_inlining) {
8219 base::SmartArrayPointer<char> target_name =
8220 target->shared()->DebugName()->ToCString();
8221 base::SmartArrayPointer<char> caller_name =
8222 caller->shared()->DebugName()->ToCString();
8223 if (reason == NULL) {
8224 PrintF("Inlined %s called from %s.\n", target_name.get(),
8225 caller_name.get());
8226 } else {
8227 PrintF("Did not inline %s called from %s (%s).\n",
8228 target_name.get(), caller_name.get(), reason);
8229 }
8230 }
8231}
8232
8233
// Sentinel returned by InliningAstSize() for targets that must not be
// inlined; chosen to be larger than any realistic AST node count so it also
// sorts such targets last when used as a size.
static const int kNotInlinable = 1000000000;
8235
8236
8237int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
8238 if (!FLAG_use_inlining) return kNotInlinable;
8239
8240 // Precondition: call is monomorphic and we have found a target with the
8241 // appropriate arity.
8242 Handle<JSFunction> caller = current_info()->closure();
8243 Handle<SharedFunctionInfo> target_shared(target->shared());
8244
8245 // Always inline functions that force inlining.
8246 if (target_shared->force_inline()) {
8247 return 0;
8248 }
8249 if (target->shared()->IsBuiltin()) {
8250 return kNotInlinable;
8251 }
8252
8253 if (target_shared->IsApiFunction()) {
8254 TraceInline(target, caller, "target is api function");
8255 return kNotInlinable;
8256 }
8257
8258 // Do a quick check on source code length to avoid parsing large
8259 // inlining candidates.
8260 if (target_shared->SourceSize() >
8261 Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
8262 TraceInline(target, caller, "target text too big");
8263 return kNotInlinable;
8264 }
8265
8266 // Target must be inlineable.
8267 BailoutReason noopt_reason = target_shared->disable_optimization_reason();
8268 if (!target_shared->IsInlineable() && noopt_reason != kHydrogenFilter) {
8269 TraceInline(target, caller, "target not inlineable");
8270 return kNotInlinable;
8271 }
8272 if (noopt_reason != kNoReason && noopt_reason != kHydrogenFilter) {
8273 TraceInline(target, caller, "target contains unsupported syntax [early]");
8274 return kNotInlinable;
8275 }
8276
8277 int nodes_added = target_shared->ast_node_count();
8278 return nodes_added;
8279}
8280
8281
// Attempts to inline a call to |target| with |arguments_count| actual
// arguments. |implicit_return_value| is used by the construct/setter
// inlining kinds, whose result is fixed regardless of the callee's return.
// Returns true once the decision to inline has been made — even if graph
// construction subsequently bails out — and false if the call site must be
// compiled as a regular call.
bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
                                       int arguments_count,
                                       HValue* implicit_return_value,
                                       BailoutId ast_id, BailoutId return_id,
                                       InliningKind inlining_kind) {
  // Only inline calls within the same native context.
  if (target->context()->native_context() !=
      top_info()->closure()->context()->native_context()) {
    return false;
  }
  int nodes_added = InliningAstSize(target);
  if (nodes_added == kNotInlinable) return false;

  Handle<JSFunction> caller = current_info()->closure();

  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [early]");
    return false;
  }

  // Don't inline deeper than the maximum number of inlining levels.
  HEnvironment* env = environment();
  int current_level = 1;
  while (env->outer() != NULL) {
    if (current_level == FLAG_max_inlining_levels) {
      TraceInline(target, caller, "inline depth limit reached");
      return false;
    }
    // Only JS_FUNCTION frames count towards the depth limit.
    if (env->outer()->frame_type() == JS_FUNCTION) {
      current_level++;
    }
    env = env->outer();
  }

  // Don't inline recursive functions: walk the chain of active inlining
  // states looking for |target| itself.
  for (FunctionState* state = function_state();
       state != NULL;
       state = state->outer()) {
    if (*state->compilation_info()->closure() == *target) {
      TraceInline(target, caller, "target is recursive");
      return false;
    }
  }

  // We don't want to add more than a certain number of nodes from inlining.
  // Always inline small methods (<= 10 nodes).
  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
                           kUnlimitedMaxInlinedNodesCumulative)) {
    TraceInline(target, caller, "cumulative AST node limit reached");
    return false;
  }

  // Parse and allocate variables.
  // Use the same AstValueFactory for creating strings in the sub-compilation
  // step, but don't transfer ownership to target_info.
  ParseInfo parse_info(zone(), target);
  parse_info.set_ast_value_factory(
      top_info()->parse_info()->ast_value_factory());
  parse_info.set_ast_value_factory_owned(false);

  CompilationInfo target_info(&parse_info);
  Handle<SharedFunctionInfo> target_shared(target->shared());

  if (IsClassConstructor(target_shared->kind())) {
    TraceInline(target, caller, "target is classConstructor");
    return false;
  }
  if (target_shared->HasDebugInfo()) {
    TraceInline(target, caller, "target is being debugged");
    return false;
  }
  if (!Compiler::ParseAndAnalyze(target_info.parse_info())) {
    if (target_info.isolate()->has_pending_exception()) {
      // Parse or scope error, never optimize this function.
      SetStackOverflow();
      target_shared->DisableOptimization(kParseScopeError);
    }
    TraceInline(target, caller, "parse failure");
    return false;
  }

  if (target_info.scope()->num_heap_slots() > 0) {
    TraceInline(target, caller, "target has context-allocated variables");
    return false;
  }

  int rest_index;
  Variable* rest = target_info.scope()->rest_parameter(&rest_index);
  if (rest) {
    TraceInline(target, caller, "target uses rest parameters");
    return false;
  }

  FunctionLiteral* function = target_info.literal();

  // The following conditions must be checked again after re-parsing, because
  // earlier the information might not have been complete due to lazy parsing.
  nodes_added = function->ast_node_count();
  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [late]");
    return false;
  }
  if (function->dont_optimize()) {
    TraceInline(target, caller, "target contains unsupported syntax [late]");
    return false;
  }

  // If the function uses the arguments object check that inlining of functions
  // with arguments object is enabled and the arguments-variable is
  // stack allocated.
  if (function->scope()->arguments() != NULL) {
    if (!FLAG_inline_arguments) {
      TraceInline(target, caller, "target uses arguments object");
      return false;
    }
  }

  // Unsupported variable references present.
  if (function->scope()->this_function_var() != nullptr ||
      function->scope()->new_target_var() != nullptr) {
    TraceInline(target, caller, "target uses new target or this function");
    return false;
  }

  // All declarations must be inlineable.
  ZoneList<Declaration*>* decls = target_info.scope()->declarations();
  int decl_count = decls->length();
  for (int i = 0; i < decl_count; ++i) {
    if (!decls->at(i)->IsInlineable()) {
      TraceInline(target, caller, "target has non-trivial declaration");
      return false;
    }
  }

  // In strong mode it is an error to call a function with too few arguments.
  // In that case do not inline because then the arity check would be skipped.
  if (is_strong(function->language_mode()) &&
      arguments_count < function->parameter_count()) {
    TraceInline(target, caller,
                "too few arguments passed to a strong function");
    return false;
  }

  // Generate the deoptimization data for the unoptimized version of
  // the target function if we don't already have it.
  if (!Compiler::EnsureDeoptimizationSupport(&target_info)) {
    TraceInline(target, caller, "could not generate deoptimization info");
    return false;
  }
  // Remember that we inlined this function. This needs to be called right
  // after the EnsureDeoptimizationSupport call so that the code flusher
  // does not remove the code with the deoptimization support.
  top_info()->AddInlinedFunction(target_info.shared_info());

  // ----------------------------------------------------------------
  // After this point, we've made a decision to inline this function (so
  // TryInline should always return true).

  // Type-check the inlined function.
  DCHECK(target_shared->has_deoptimization_support());
  AstTyper(target_info.isolate(), target_info.zone(), target_info.closure(),
           target_info.scope(), target_info.osr_ast_id(), target_info.literal())
      .Run();

  int inlining_id = 0;
  if (top_info()->is_tracking_positions()) {
    inlining_id = top_info()->TraceInlinedFunction(
        target_shared, source_position(), function_state()->inlining_id());
  }

  // Save the pending call context. Set up new one for the inlined function.
  // The function state is new-allocated because we need to delete it
  // in two different places.
  FunctionState* target_state =
      new FunctionState(this, &target_info, inlining_kind, inlining_id);

  HConstant* undefined = graph()->GetConstantUndefined();

  HEnvironment* inner_env =
      environment()->CopyForInlining(target,
                                     arguments_count,
                                     function,
                                     undefined,
                                     function_state()->inlining_kind());

  HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
  inner_env->BindContext(context);

  // Create a dematerialized arguments object for the function, also copy the
  // current arguments values to use them for materialization.
  HEnvironment* arguments_env = inner_env->arguments_environment();
  int parameter_count = arguments_env->parameter_count();
  HArgumentsObject* arguments_object = Add<HArgumentsObject>(parameter_count);
  for (int i = 0; i < parameter_count; i++) {
    arguments_object->AddArgument(arguments_env->Lookup(i), zone());
  }

  // If the function uses the arguments object then bind one.
  if (function->scope()->arguments() != NULL) {
    DCHECK(function->scope()->arguments()->IsStackAllocated());
    inner_env->Bind(function->scope()->arguments(), arguments_object);
  }

  // Capture the state before invoking the inlined function for deopt in the
  // inlined function. This simulate has no bailout-id since it's not directly
  // reachable for deopt, and is only used to capture the state. If the simulate
  // becomes reachable by merging, the ast id of the simulate merged into it is
  // adopted.
  Add<HSimulate>(BailoutId::None());

  current_block()->UpdateEnvironment(inner_env);
  Scope* saved_scope = scope();
  set_scope(target_info.scope());
  HEnterInlined* enter_inlined =
      Add<HEnterInlined>(return_id, target, context, arguments_count, function,
                         function_state()->inlining_kind(),
                         function->scope()->arguments(), arguments_object);
  if (top_info()->is_tracking_positions()) {
    enter_inlined->set_inlining_id(inlining_id);
  }
  function_state()->set_entry(enter_inlined);

  // Build the graph for the inlined function body.
  VisitDeclarations(target_info.scope()->declarations());
  VisitStatements(function->body());
  set_scope(saved_scope);
  if (HasStackOverflow()) {
    // Bail out if the inline function did, as we cannot residualize a call
    // instead, but do not disable optimization for the outer function.
    TraceInline(target, caller, "inline graph construction failed");
    target_shared->DisableOptimization(kInliningBailedOut);
    current_info()->RetryOptimization(kInliningBailedOut);
    delete target_state;
    return true;
  }

  // Update inlined nodes count.
  inlined_count_ += nodes_added;

  Handle<Code> unoptimized_code(target_shared->code());
  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  graph()->update_type_change_checksum(type_info->own_type_change_checksum());

  TraceInline(target, caller, NULL);

  // A non-NULL current block means control can fall off the end of the
  // inlined body; synthesize the kind-specific implicit return.
  if (current_block() != NULL) {
    FunctionState* state = function_state();
    if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
      // Falling off the end of an inlined construct call. In a test context the
      // return value will always evaluate to true, in a value context the
      // return value is the newly allocated receiver.
      if (call_context()->IsTest()) {
        Goto(inlined_test_context()->if_true(), state);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        DCHECK(call_context()->IsValue());
        AddLeaveInlined(implicit_return_value, state);
      }
    } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
      // Falling off the end of an inlined setter call. The returned value is
      // never used, the value of an assignment is always the value of the RHS
      // of the assignment.
      if (call_context()->IsTest()) {
        inlined_test_context()->ReturnValue(implicit_return_value);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        DCHECK(call_context()->IsValue());
        AddLeaveInlined(implicit_return_value, state);
      }
    } else {
      // Falling off the end of a normal inlined function. This basically means
      // returning undefined.
      if (call_context()->IsTest()) {
        Goto(inlined_test_context()->if_false(), state);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        DCHECK(call_context()->IsValue());
        AddLeaveInlined(undefined, state);
      }
    }
  }

  // Fix up the function exits.
  if (inlined_test_context() != NULL) {
    HBasicBlock* if_true = inlined_test_context()->if_true();
    HBasicBlock* if_false = inlined_test_context()->if_false();

    HEnterInlined* entry = function_state()->entry();

    // Pop the return test context from the expression context stack.
    DCHECK(ast_context() == inlined_test_context());
    ClearInlinedTestContext();
    delete target_state;

    // Forward to the real test context.
    if (if_true->HasPredecessor()) {
      entry->RegisterReturnTarget(if_true, zone());
      if_true->SetJoinId(ast_id);
      HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
      Goto(if_true, true_target, function_state());
    }
    if (if_false->HasPredecessor()) {
      entry->RegisterReturnTarget(if_false, zone());
      if_false->SetJoinId(ast_id);
      HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
      Goto(if_false, false_target, function_state());
    }
    set_current_block(NULL);
    return true;

  } else if (function_return()->HasPredecessor()) {
    function_state()->entry()->RegisterReturnTarget(function_return(), zone());
    function_return()->SetJoinId(ast_id);
    set_current_block(function_return());
  } else {
    set_current_block(NULL);
  }
  delete target_state;
  return true;
}
8605
8606
// Inlines a regular (non-constructor) call site using the call's recorded
// target and actual argument count.
bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
  return TryInline(expr->target(), expr->arguments()->length(), NULL,
                   expr->id(), expr->ReturnId(), NORMAL_RETURN);
}
8611
8612
// Inlines a 'new' expression. |implicit_return_value| is the value produced
// when the constructor body falls off the end (see TryInline's
// CONSTRUCT_CALL_RETURN handling).
bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
                                               HValue* implicit_return_value) {
  return TryInline(expr->target(), expr->arguments()->length(),
                   implicit_return_value, expr->id(), expr->ReturnId(),
                   CONSTRUCT_CALL_RETURN);
}
8619
8620
8621bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
8622 Handle<Map> receiver_map,
8623 BailoutId ast_id,
8624 BailoutId return_id) {
8625 if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
8626 return TryInline(getter, 0, NULL, ast_id, return_id, GETTER_CALL_RETURN);
8627}
8628
8629
8630bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
8631 Handle<Map> receiver_map,
8632 BailoutId id,
8633 BailoutId assignment_id,
8634 HValue* implicit_return_value) {
8635 if (TryInlineApiSetter(setter, receiver_map, id)) return true;
8636 return TryInline(setter, 1, implicit_return_value, id, assignment_id,
8637 SETTER_CALL_RETURN);
8638}
8639
8640
// Inlines |function| at a call site where the callee is not the
// expression's syntactic target; |arguments_count| is supplied by the
// caller rather than taken from the AST.
bool HOptimizedGraphBuilder::TryInlineIndirectCall(Handle<JSFunction> function,
                                                   Call* expr,
                                                   int arguments_count) {
  return TryInline(function, arguments_count, NULL, expr->id(),
                   expr->ReturnId(), NORMAL_RETURN);
}
8647
8648
// Tries to replace a call to a recognized Math builtin with a dedicated
// Hydrogen instruction. On success the arguments, receiver and function are
// popped/dropped from the environment and the result instruction is handed
// to the current ast context; returns false to fall back to a regular call.
bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  switch (id) {
    case kMathExp:
      // Math.exp is only inlined under --fast-math; otherwise break to the
      // generic-call fallback below.
      if (!FLAG_fast_math) break;
      // Fall through if FLAG_fast_math.
    case kMathRound:
    case kMathFround:
    case kMathFloor:
    case kMathAbs:
    case kMathSqrt:
    case kMathLog:
    case kMathClz32:
      // Unary math: only the one-argument form is inlined.
      if (expr->arguments()->length() == 1) {
        HValue* argument = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathImul:
      // Math.imul: only the two-argument form is inlined.
      if (expr->arguments()->length() == 2) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(2);  // Receiver and function.
        HInstruction* op =
            HMul::NewImul(isolate(), zone(), context(), left, right);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    default:
      // Not supported for inlining yet.
      break;
  }
  return false;
}
8688
8689
8690// static
8691bool HOptimizedGraphBuilder::IsReadOnlyLengthDescriptor(
8692 Handle<Map> jsarray_map) {
8693 DCHECK(!jsarray_map->is_dictionary_map());
8694 Isolate* isolate = jsarray_map->GetIsolate();
8695 Handle<Name> length_string = isolate->factory()->length_string();
8696 DescriptorArray* descriptors = jsarray_map->instance_descriptors();
8697 int number = descriptors->SearchWithCache(*length_string, *jsarray_map);
8698 DCHECK_NE(DescriptorArray::kNotFound, number);
8699 return descriptors->GetDetails(number).IsReadOnly();
8700}
8701
8702
8703// static
8704bool HOptimizedGraphBuilder::CanInlineArrayResizeOperation(
8705 Handle<Map> receiver_map) {
8706 return !receiver_map.is_null() && receiver_map->prototype()->IsJSObject() &&
8707 receiver_map->instance_type() == JS_ARRAY_TYPE &&
8708 IsFastElementsKind(receiver_map->elements_kind()) &&
8709 !receiver_map->is_dictionary_map() && !receiver_map->is_observed() &&
8710 receiver_map->is_extensible() &&
8711 (!receiver_map->is_prototype_map() || receiver_map->is_stable()) &&
8712 !IsReadOnlyLengthDescriptor(receiver_map);
8713}
8714
8715
8716bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
8717 Call* expr, Handle<JSFunction> function, Handle<Map> receiver_map,
8718 int args_count_no_receiver) {
8719 if (!function->shared()->HasBuiltinFunctionId()) return false;
8720 BuiltinFunctionId id = function->shared()->builtin_function_id();
8721 int argument_count = args_count_no_receiver + 1; // Plus receiver.
8722
8723 if (receiver_map.is_null()) {
8724 HValue* receiver = environment()->ExpressionStackAt(args_count_no_receiver);
8725 if (receiver->IsConstant() &&
8726 HConstant::cast(receiver)->handle(isolate())->IsHeapObject()) {
8727 receiver_map =
8728 handle(Handle<HeapObject>::cast(
8729 HConstant::cast(receiver)->handle(isolate()))->map());
8730 }
8731 }
8732 // Try to inline calls like Math.* as operations in the calling function.
8733 switch (id) {
8734 case kStringCharCodeAt:
8735 case kStringCharAt:
8736 if (argument_count == 2) {
8737 HValue* index = Pop();
8738 HValue* string = Pop();
8739 Drop(1); // Function.
8740 HInstruction* char_code =
8741 BuildStringCharCodeAt(string, index);
8742 if (id == kStringCharCodeAt) {
8743 ast_context()->ReturnInstruction(char_code, expr->id());
8744 return true;
8745 }
8746 AddInstruction(char_code);
8747 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
8748 ast_context()->ReturnInstruction(result, expr->id());
8749 return true;
8750 }
8751 break;
8752 case kStringFromCharCode:
8753 if (argument_count == 2) {
8754 HValue* argument = Pop();
8755 Drop(2); // Receiver and function.
8756 HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
8757 ast_context()->ReturnInstruction(result, expr->id());
8758 return true;
8759 }
8760 break;
8761 case kMathExp:
8762 if (!FLAG_fast_math) break;
8763 // Fall through if FLAG_fast_math.
8764 case kMathRound:
8765 case kMathFround:
8766 case kMathFloor:
8767 case kMathAbs:
8768 case kMathSqrt:
8769 case kMathLog:
8770 case kMathClz32:
8771 if (argument_count == 2) {
8772 HValue* argument = Pop();
8773 Drop(2); // Receiver and function.
8774 HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
8775 ast_context()->ReturnInstruction(op, expr->id());
8776 return true;
8777 }
8778 break;
8779 case kMathPow:
8780 if (argument_count == 3) {
8781 HValue* right = Pop();
8782 HValue* left = Pop();
8783 Drop(2); // Receiver and function.
8784 HInstruction* result = NULL;
8785 // Use sqrt() if exponent is 0.5 or -0.5.
8786 if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
8787 double exponent = HConstant::cast(right)->DoubleValue();
8788 if (exponent == 0.5) {
8789 result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
8790 } else if (exponent == -0.5) {
8791 HValue* one = graph()->GetConstant1();
8792 HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
8793 left, kMathPowHalf);
8794 // MathPowHalf doesn't have side effects so there's no need for
8795 // an environment simulation here.
8796 DCHECK(!sqrt->HasObservableSideEffects());
8797 result = NewUncasted<HDiv>(one, sqrt);
8798 } else if (exponent == 2.0) {
8799 result = NewUncasted<HMul>(left, left);
8800 }
8801 }
8802
8803 if (result == NULL) {
8804 result = NewUncasted<HPower>(left, right);
8805 }
8806 ast_context()->ReturnInstruction(result, expr->id());
8807 return true;
8808 }
8809 break;
8810 case kMathMax:
8811 case kMathMin:
8812 if (argument_count == 3) {
8813 HValue* right = Pop();
8814 HValue* left = Pop();
8815 Drop(2); // Receiver and function.
8816 HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
8817 : HMathMinMax::kMathMax;
8818 HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
8819 ast_context()->ReturnInstruction(result, expr->id());
8820 return true;
8821 }
8822 break;
8823 case kMathImul:
8824 if (argument_count == 3) {
8825 HValue* right = Pop();
8826 HValue* left = Pop();
8827 Drop(2); // Receiver and function.
8828 HInstruction* result =
8829 HMul::NewImul(isolate(), zone(), context(), left, right);
8830 ast_context()->ReturnInstruction(result, expr->id());
8831 return true;
8832 }
8833 break;
8834 case kArrayPop: {
8835 if (!CanInlineArrayResizeOperation(receiver_map)) return false;
8836 ElementsKind elements_kind = receiver_map->elements_kind();
8837
8838 Drop(args_count_no_receiver);
8839 HValue* result;
8840 HValue* reduced_length;
8841 HValue* receiver = Pop();
8842
8843 HValue* checked_object = AddCheckMap(receiver, receiver_map);
8844 HValue* length =
8845 Add<HLoadNamedField>(checked_object, nullptr,
8846 HObjectAccess::ForArrayLength(elements_kind));
8847
8848 Drop(1); // Function.
8849
8850 { NoObservableSideEffectsScope scope(this);
8851 IfBuilder length_checker(this);
8852
8853 HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
8854 length, graph()->GetConstant0(), Token::EQ);
8855 length_checker.Then();
8856
8857 if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8858
8859 length_checker.Else();
8860 HValue* elements = AddLoadElements(checked_object);
8861 // Ensure that we aren't popping from a copy-on-write array.
8862 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8863 elements = BuildCopyElementsOnWrite(checked_object, elements,
8864 elements_kind, length);
8865 }
8866 reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
8867 result = AddElementAccess(elements, reduced_length, nullptr,
8868 bounds_check, nullptr, elements_kind, LOAD);
8869 HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
8870 ? graph()->GetConstantHole()
8871 : Add<HConstant>(HConstant::kHoleNaN);
8872 if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8873 elements_kind = FAST_HOLEY_ELEMENTS;
8874 }
8875 AddElementAccess(elements, reduced_length, hole, bounds_check, nullptr,
8876 elements_kind, STORE);
8877 Add<HStoreNamedField>(
8878 checked_object, HObjectAccess::ForArrayLength(elements_kind),
8879 reduced_length, STORE_TO_INITIALIZED_ENTRY);
8880
8881 if (!ast_context()->IsEffect()) Push(result);
8882
8883 length_checker.End();
8884 }
8885 result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
8886 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8887 if (!ast_context()->IsEffect()) Drop(1);
8888
8889 ast_context()->ReturnValue(result);
8890 return true;
8891 }
8892 case kArrayPush: {
8893 if (!CanInlineArrayResizeOperation(receiver_map)) return false;
8894 ElementsKind elements_kind = receiver_map->elements_kind();
8895
8896 // If there may be elements accessors in the prototype chain, the fast
8897 // inlined version can't be used.
8898 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8899 // If there currently can be no elements accessors on the prototype chain,
8900 // it doesn't mean that there won't be any later. Install a full prototype
8901 // chain check to trap element accessors being installed on the prototype
8902 // chain, which would cause elements to go to dictionary mode and result
8903 // in a map change.
8904 Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
8905 BuildCheckPrototypeMaps(prototype, Handle<JSObject>());
8906
8907 // Protect against adding elements to the Array prototype, which needs to
8908 // route through appropriate bottlenecks.
8909 if (isolate()->IsFastArrayConstructorPrototypeChainIntact() &&
8910 !prototype->IsJSArray()) {
8911 return false;
8912 }
8913
8914 const int argc = args_count_no_receiver;
8915 if (argc != 1) return false;
8916
8917 HValue* value_to_push = Pop();
8918 HValue* array = Pop();
8919 Drop(1); // Drop function.
8920
8921 HInstruction* new_size = NULL;
8922 HValue* length = NULL;
8923
8924 {
8925 NoObservableSideEffectsScope scope(this);
8926
8927 length = Add<HLoadNamedField>(
8928 array, nullptr, HObjectAccess::ForArrayLength(elements_kind));
8929
8930 new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());
8931
8932 bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
8933 HValue* checked_array = Add<HCheckMaps>(array, receiver_map);
8934 BuildUncheckedMonomorphicElementAccess(
8935 checked_array, length, value_to_push, is_array, elements_kind,
8936 STORE, NEVER_RETURN_HOLE, STORE_AND_GROW_NO_TRANSITION);
8937
8938 if (!ast_context()->IsEffect()) Push(new_size);
8939 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
8940 if (!ast_context()->IsEffect()) Drop(1);
8941 }
8942
8943 ast_context()->ReturnValue(new_size);
8944 return true;
8945 }
8946 case kArrayShift: {
8947 if (!CanInlineArrayResizeOperation(receiver_map)) return false;
8948 ElementsKind kind = receiver_map->elements_kind();
8949
8950 // If there may be elements accessors in the prototype chain, the fast
8951 // inlined version can't be used.
8952 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8953
8954 // If there currently can be no elements accessors on the prototype chain,
8955 // it doesn't mean that there won't be any later. Install a full prototype
8956 // chain check to trap element accessors being installed on the prototype
8957 // chain, which would cause elements to go to dictionary mode and result
8958 // in a map change.
8959 BuildCheckPrototypeMaps(
8960 handle(JSObject::cast(receiver_map->prototype()), isolate()),
8961 Handle<JSObject>::null());
8962
8963 // Threshold for fast inlined Array.shift().
8964 HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));
8965
8966 Drop(args_count_no_receiver);
8967 HValue* receiver = Pop();
8968 HValue* function = Pop();
8969 HValue* result;
8970
8971 {
8972 NoObservableSideEffectsScope scope(this);
8973
8974 HValue* length = Add<HLoadNamedField>(
8975 receiver, nullptr, HObjectAccess::ForArrayLength(kind));
8976
8977 IfBuilder if_lengthiszero(this);
8978 HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
8979 length, graph()->GetConstant0(), Token::EQ);
8980 if_lengthiszero.Then();
8981 {
8982 if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8983 }
8984 if_lengthiszero.Else();
8985 {
8986 HValue* elements = AddLoadElements(receiver);
8987
8988 // Check if we can use the fast inlined Array.shift().
8989 IfBuilder if_inline(this);
8990 if_inline.If<HCompareNumericAndBranch>(
8991 length, inline_threshold, Token::LTE);
8992 if (IsFastSmiOrObjectElementsKind(kind)) {
8993 // We cannot handle copy-on-write backing stores here.
8994 if_inline.AndIf<HCompareMap>(
8995 elements, isolate()->factory()->fixed_array_map());
8996 }
8997 if_inline.Then();
8998 {
8999 // Remember the result.
9000 if (!ast_context()->IsEffect()) {
9001 Push(AddElementAccess(elements, graph()->GetConstant0(), nullptr,
9002 lengthiszero, nullptr, kind, LOAD));
9003 }
9004
9005 // Compute the new length.
9006 HValue* new_length = AddUncasted<HSub>(
9007 length, graph()->GetConstant1());
9008 new_length->ClearFlag(HValue::kCanOverflow);
9009
9010 // Copy the remaining elements.
9011 LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
9012 {
9013 HValue* new_key = loop.BeginBody(
9014 graph()->GetConstant0(), new_length, Token::LT);
9015 HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
9016 key->ClearFlag(HValue::kCanOverflow);
9017 ElementsKind copy_kind =
9018 kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
9019 HValue* element =
9020 AddUncasted<HLoadKeyed>(elements, key, lengthiszero, nullptr,
9021 copy_kind, ALLOW_RETURN_HOLE);
9022 HStoreKeyed* store = Add<HStoreKeyed>(elements, new_key, element,
9023 nullptr, copy_kind);
9024 store->SetFlag(HValue::kAllowUndefinedAsNaN);
9025 }
9026 loop.EndBody();
9027
9028 // Put a hole at the end.
9029 HValue* hole = IsFastSmiOrObjectElementsKind(kind)
9030 ? graph()->GetConstantHole()
9031 : Add<HConstant>(HConstant::kHoleNaN);
9032 if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
9033 Add<HStoreKeyed>(elements, new_length, hole, nullptr, kind,
9034 INITIALIZING_STORE);
9035
9036 // Remember new length.
9037 Add<HStoreNamedField>(
9038 receiver, HObjectAccess::ForArrayLength(kind),
9039 new_length, STORE_TO_INITIALIZED_ENTRY);
9040 }
9041 if_inline.Else();
9042 {
9043 Add<HPushArguments>(receiver);
9044 result = Add<HCallJSFunction>(function, 1);
9045 if (!ast_context()->IsEffect()) Push(result);
9046 }
9047 if_inline.End();
9048 }
9049 if_lengthiszero.End();
9050 }
9051 result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
9052 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
9053 if (!ast_context()->IsEffect()) Drop(1);
9054 ast_context()->ReturnValue(result);
9055 return true;
9056 }
9057 case kArrayIndexOf:
9058 case kArrayLastIndexOf: {
9059 if (receiver_map.is_null()) return false;
9060 if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
9061 ElementsKind kind = receiver_map->elements_kind();
9062 if (!IsFastElementsKind(kind)) return false;
9063 if (receiver_map->is_observed()) return false;
9064 if (argument_count != 2) return false;
9065 if (!receiver_map->is_extensible()) return false;
9066
9067 // If there may be elements accessors in the prototype chain, the fast
9068 // inlined version can't be used.
9069 if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
9070
9071 // If there currently can be no elements accessors on the prototype chain,
9072 // it doesn't mean that there won't be any later. Install a full prototype
9073 // chain check to trap element accessors being installed on the prototype
9074 // chain, which would cause elements to go to dictionary mode and result
9075 // in a map change.
9076 BuildCheckPrototypeMaps(
9077 handle(JSObject::cast(receiver_map->prototype()), isolate()),
9078 Handle<JSObject>::null());
9079
9080 HValue* search_element = Pop();
9081 HValue* receiver = Pop();
9082 Drop(1); // Drop function.
9083
9084 ArrayIndexOfMode mode = (id == kArrayIndexOf)
9085 ? kFirstIndexOf : kLastIndexOf;
9086 HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);
9087
9088 if (!ast_context()->IsEffect()) Push(index);
9089 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
9090 if (!ast_context()->IsEffect()) Drop(1);
9091 ast_context()->ReturnValue(index);
9092 return true;
9093 }
9094 default:
9095 // Not yet supported for inlining.
9096 break;
9097 }
9098 return false;
9099}
9100
9101
9102bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
9103 HValue* receiver) {
9104 Handle<JSFunction> function = expr->target();
9105 int argc = expr->arguments()->length();
9106 SmallMapList receiver_maps;
9107 return TryInlineApiCall(function,
9108 receiver,
9109 &receiver_maps,
9110 argc,
9111 expr->id(),
9112 kCallApiFunction);
9113}
9114
9115
9116bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
9117 Call* expr,
9118 HValue* receiver,
9119 SmallMapList* receiver_maps) {
9120 Handle<JSFunction> function = expr->target();
9121 int argc = expr->arguments()->length();
9122 return TryInlineApiCall(function,
9123 receiver,
9124 receiver_maps,
9125 argc,
9126 expr->id(),
9127 kCallApiMethod);
9128}
9129
9130
9131bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<JSFunction> function,
9132 Handle<Map> receiver_map,
9133 BailoutId ast_id) {
9134 SmallMapList receiver_maps(1, zone());
9135 receiver_maps.Add(receiver_map, zone());
9136 return TryInlineApiCall(function,
9137 NULL, // Receiver is on expression stack.
9138 &receiver_maps,
9139 0,
9140 ast_id,
9141 kCallApiGetter);
9142}
9143
9144
9145bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<JSFunction> function,
9146 Handle<Map> receiver_map,
9147 BailoutId ast_id) {
9148 SmallMapList receiver_maps(1, zone());
9149 receiver_maps.Add(receiver_map, zone());
9150 return TryInlineApiCall(function,
9151 NULL, // Receiver is on expression stack.
9152 &receiver_maps,
9153 1,
9154 ast_id,
9155 kCallApiSetter);
9156}
9157
9158
9159bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
9160 HValue* receiver,
9161 SmallMapList* receiver_maps,
9162 int argc,
9163 BailoutId ast_id,
9164 ApiCallType call_type) {
9165 if (function->context()->native_context() !=
9166 top_info()->closure()->context()->native_context()) {
9167 return false;
9168 }
9169 CallOptimization optimization(function);
9170 if (!optimization.is_simple_api_call()) return false;
9171 Handle<Map> holder_map;
9172 for (int i = 0; i < receiver_maps->length(); ++i) {
9173 auto map = receiver_maps->at(i);
9174 // Don't inline calls to receivers requiring accesschecks.
9175 if (map->is_access_check_needed()) return false;
9176 }
9177 if (call_type == kCallApiFunction) {
9178 // Cannot embed a direct reference to the global proxy map
9179 // as it maybe dropped on deserialization.
9180 CHECK(!isolate()->serializer_enabled());
9181 DCHECK_EQ(0, receiver_maps->length());
9182 receiver_maps->Add(handle(function->global_proxy()->map()), zone());
9183 }
9184 CallOptimization::HolderLookup holder_lookup =
9185 CallOptimization::kHolderNotFound;
9186 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
9187 receiver_maps->first(), &holder_lookup);
9188 if (holder_lookup == CallOptimization::kHolderNotFound) return false;
9189
9190 if (FLAG_trace_inlining) {
9191 PrintF("Inlining api function ");
9192 function->ShortPrint();
9193 PrintF("\n");
9194 }
9195
9196 bool is_function = false;
9197 bool is_store = false;
9198 switch (call_type) {
9199 case kCallApiFunction:
9200 case kCallApiMethod:
9201 // Need to check that none of the receiver maps could have changed.
9202 Add<HCheckMaps>(receiver, receiver_maps);
9203 // Need to ensure the chain between receiver and api_holder is intact.
9204 if (holder_lookup == CallOptimization::kHolderFound) {
9205 AddCheckPrototypeMaps(api_holder, receiver_maps->first());
9206 } else {
9207 DCHECK_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
9208 }
9209 // Includes receiver.
9210 PushArgumentsFromEnvironment(argc + 1);
9211 is_function = true;
9212 break;
9213 case kCallApiGetter:
9214 // Receiver and prototype chain cannot have changed.
9215 DCHECK_EQ(0, argc);
9216 DCHECK_NULL(receiver);
9217 // Receiver is on expression stack.
9218 receiver = Pop();
9219 Add<HPushArguments>(receiver);
9220 break;
9221 case kCallApiSetter:
9222 {
9223 is_store = true;
9224 // Receiver and prototype chain cannot have changed.
9225 DCHECK_EQ(1, argc);
9226 DCHECK_NULL(receiver);
9227 // Receiver and value are on expression stack.
9228 HValue* value = Pop();
9229 receiver = Pop();
9230 Add<HPushArguments>(receiver, value);
9231 break;
9232 }
9233 }
9234
9235 HValue* holder = NULL;
9236 switch (holder_lookup) {
9237 case CallOptimization::kHolderFound:
9238 holder = Add<HConstant>(api_holder);
9239 break;
9240 case CallOptimization::kHolderIsReceiver:
9241 holder = receiver;
9242 break;
9243 case CallOptimization::kHolderNotFound:
9244 UNREACHABLE();
9245 break;
9246 }
9247 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
9248 Handle<Object> call_data_obj(api_call_info->data(), isolate());
9249 bool call_data_undefined = call_data_obj->IsUndefined();
9250 HValue* call_data = Add<HConstant>(call_data_obj);
9251 ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
9252 ExternalReference ref = ExternalReference(&fun,
9253 ExternalReference::DIRECT_API_CALL,
9254 isolate());
9255 HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
9256
9257 HValue* op_vals[] = {context(), Add<HConstant>(function), call_data, holder,
9258 api_function_address, nullptr};
9259
9260 HInstruction* call = nullptr;
9261 if (!is_function) {
9262 CallApiAccessorStub stub(isolate(), is_store, call_data_undefined);
9263 Handle<Code> code = stub.GetCode();
9264 HConstant* code_value = Add<HConstant>(code);
9265 ApiAccessorDescriptor descriptor(isolate());
9266 call = New<HCallWithDescriptor>(
9267 code_value, argc + 1, descriptor,
9268 Vector<HValue*>(op_vals, arraysize(op_vals) - 1));
9269 } else if (argc <= CallApiFunctionWithFixedArgsStub::kMaxFixedArgs) {
9270 CallApiFunctionWithFixedArgsStub stub(isolate(), argc, call_data_undefined);
9271 Handle<Code> code = stub.GetCode();
9272 HConstant* code_value = Add<HConstant>(code);
9273 ApiFunctionWithFixedArgsDescriptor descriptor(isolate());
9274 call = New<HCallWithDescriptor>(
9275 code_value, argc + 1, descriptor,
9276 Vector<HValue*>(op_vals, arraysize(op_vals) - 1));
9277 Drop(1); // Drop function.
9278 } else {
9279 op_vals[arraysize(op_vals) - 1] = Add<HConstant>(argc);
9280 CallApiFunctionStub stub(isolate(), call_data_undefined);
9281 Handle<Code> code = stub.GetCode();
9282 HConstant* code_value = Add<HConstant>(code);
9283 ApiFunctionDescriptor descriptor(isolate());
9284 call =
9285 New<HCallWithDescriptor>(code_value, argc + 1, descriptor,
9286 Vector<HValue*>(op_vals, arraysize(op_vals)));
9287 Drop(1); // Drop function.
9288 }
9289
9290 ast_context()->ReturnInstruction(call, ast_id);
9291 return true;
9292}
9293
9294
// Emits a call reached through Function.prototype.call/apply. If the callee
// is a known constant JSFunction, first tries builtin- and normal inlining;
// otherwise (or if inlining fails) falls back to a generic HInvokeFunction.
// Expects the function plus |arguments_count| values (receiver included) on
// the expression stack.
void HOptimizedGraphBuilder::HandleIndirectCall(Call* expr, HValue* function,
                                                int arguments_count) {
  Handle<JSFunction> known_function;
  int args_count_no_receiver = arguments_count - 1;
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    known_function =
        Handle<JSFunction>::cast(HConstant::cast(function)->handle(isolate()));
    if (TryInlineBuiltinMethodCall(expr, known_function, Handle<Map>(),
                                   args_count_no_receiver)) {
      if (FLAG_trace_inlining) {
        PrintF("Inlining builtin ");
        known_function->ShortPrint();
        PrintF("\n");
      }
      return;
    }

    if (TryInlineIndirectCall(known_function, expr, args_count_no_receiver)) {
      return;
    }
  }

  // Generic path; known_function may still be null here, which
  // HInvokeFunction tolerates.
  PushArgumentsFromEnvironment(arguments_count);
  HInvokeFunction* call =
      New<HInvokeFunction>(function, known_function, arguments_count);
  Drop(1);  // Function
  ast_context()->ReturnInstruction(call, expr->id());
}
9324
9325
9326bool HOptimizedGraphBuilder::TryIndirectCall(Call* expr) {
9327 DCHECK(expr->expression()->IsProperty());
9328
9329 if (!expr->IsMonomorphic()) {
9330 return false;
9331 }
9332 Handle<Map> function_map = expr->GetReceiverTypes()->first();
9333 if (function_map->instance_type() != JS_FUNCTION_TYPE ||
9334 !expr->target()->shared()->HasBuiltinFunctionId()) {
9335 return false;
9336 }
9337
9338 switch (expr->target()->shared()->builtin_function_id()) {
9339 case kFunctionCall: {
9340 if (expr->arguments()->length() == 0) return false;
9341 BuildFunctionCall(expr);
9342 return true;
9343 }
9344 case kFunctionApply: {
9345 // For .apply, only the pattern f.apply(receiver, arguments)
9346 // is supported.
9347 if (current_info()->scope()->arguments() == NULL) return false;
9348
9349 if (!CanBeFunctionApplyArguments(expr)) return false;
9350
9351 BuildFunctionApply(expr);
9352 return true;
9353 }
9354 default: { return false; }
9355 }
9356 UNREACHABLE();
9357}
9358
9359
9360// f.apply(...)
9361void HOptimizedGraphBuilder::BuildFunctionApply(Call* expr) {
9362 ZoneList<Expression*>* args = expr->arguments();
9363 CHECK_ALIVE(VisitForValue(args->at(0)));
9364 HValue* receiver = Pop(); // receiver
9365 HValue* function = Pop(); // f
9366 Drop(1); // apply
9367
9368 Handle<Map> function_map = expr->GetReceiverTypes()->first();
9369 HValue* checked_function = AddCheckMap(function, function_map);
9370
9371 if (function_state()->outer() == NULL) {
9372 HInstruction* elements = Add<HArgumentsElements>(false);
9373 HInstruction* length = Add<HArgumentsLength>(elements);
9374 HValue* wrapped_receiver = BuildWrapReceiver(receiver, checked_function);
9375 HInstruction* result = New<HApplyArguments>(function,
9376 wrapped_receiver,
9377 length,
9378 elements);
9379 ast_context()->ReturnInstruction(result, expr->id());
9380 } else {
9381 // We are inside inlined function and we know exactly what is inside
9382 // arguments object. But we need to be able to materialize at deopt.
9383 DCHECK_EQ(environment()->arguments_environment()->parameter_count(),
9384 function_state()->entry()->arguments_object()->arguments_count());
9385 HArgumentsObject* args = function_state()->entry()->arguments_object();
9386 const ZoneList<HValue*>* arguments_values = args->arguments_values();
9387 int arguments_count = arguments_values->length();
9388 Push(function);
9389 Push(BuildWrapReceiver(receiver, checked_function));
9390 for (int i = 1; i < arguments_count; i++) {
9391 Push(arguments_values->at(i));
9392 }
9393 HandleIndirectCall(expr, function, arguments_count);
9394 }
9395}
9396
9397
9398// f.call(...)
9399void HOptimizedGraphBuilder::BuildFunctionCall(Call* expr) {
9400 HValue* function = Top(); // f
9401 Handle<Map> function_map = expr->GetReceiverTypes()->first();
9402 HValue* checked_function = AddCheckMap(function, function_map);
9403
9404 // f and call are on the stack in the unoptimized code
9405 // during evaluation of the arguments.
9406 CHECK_ALIVE(VisitExpressions(expr->arguments()));
9407
9408 int args_length = expr->arguments()->length();
9409 int receiver_index = args_length - 1;
9410 // Patch the receiver.
9411 HValue* receiver = BuildWrapReceiver(
9412 environment()->ExpressionStackAt(receiver_index), checked_function);
9413 environment()->SetExpressionStackAt(receiver_index, receiver);
9414
9415 // Call must not be on the stack from now on.
9416 int call_index = args_length + 1;
9417 environment()->RemoveExpressionStackAt(call_index);
9418
9419 HandleIndirectCall(expr, function, args_length);
9420}
9421
9422
9423HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
9424 Handle<JSFunction> target) {
9425 SharedFunctionInfo* shared = target->shared();
9426 if (is_sloppy(shared->language_mode()) && !shared->native()) {
9427 // Cannot embed a direct reference to the global proxy
9428 // as is it dropped on deserialization.
9429 CHECK(!isolate()->serializer_enabled());
9430 Handle<JSObject> global_proxy(target->context()->global_proxy());
9431 return Add<HConstant>(global_proxy);
9432 }
9433 return graph()->GetConstantUndefined();
9434}
9435
9436
// Emits a call to the Array function, either fully inlined (allocating the
// array directly) or as an HCallNewArray specialized on the allocation
// site's elements kind. |expression| may be a Call or a CallNew; for plain
// calls the extra receiver slot is dropped afterwards.
void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
                                            int arguments_count,
                                            HValue* function,
                                            Handle<AllocationSite> site) {
  // Deopt if the callee turns out not to be the real Array function.
  Add<HCheckValue>(function, array_function());

  if (IsCallArrayInlineable(arguments_count, site)) {
    BuildInlinedCallArray(expression, arguments_count, site);
    return;
  }

  HInstruction* call = PreProcessCall(New<HCallNewArray>(
      function, arguments_count + 1, site->GetElementsKind(), site));
  if (expression->IsCall()) {
    Drop(1);
  }
  ast_context()->ReturnInstruction(call, expression->id());
}
9455
9456
// Builds the inlined body of Array.prototype.indexOf / lastIndexOf for a
// fast-elements JSArray. Pushes -1 as the default result, scans the backing
// store in the direction given by |mode|, and replaces the stack top with
// the matching index on the first strict-equality hit; the final result is
// popped and returned. Comparison semantics are specialized per elements
// kind and, for generic kinds, per dynamic type of |search_element|
// (string / number / other), mirroring EQ_STRICT.
HValue* HOptimizedGraphBuilder::BuildArrayIndexOf(HValue* receiver,
                                                  HValue* search_element,
                                                  ElementsKind kind,
                                                  ArrayIndexOfMode mode) {
  DCHECK(IsFastElementsKind(kind));

  NoObservableSideEffectsScope no_effects(this);

  HValue* elements = AddLoadElements(receiver);
  HValue* length = AddLoadArrayLength(receiver, kind);

  // Loop bounds and direction depend on indexOf vs lastIndexOf.
  HValue* initial;
  HValue* terminating;
  Token::Value token;
  LoopBuilder::Direction direction;
  if (mode == kFirstIndexOf) {
    initial = graph()->GetConstant0();
    terminating = length;
    token = Token::LT;
    direction = LoopBuilder::kPostIncrement;
  } else {
    DCHECK_EQ(kLastIndexOf, mode);
    initial = length;
    terminating = graph()->GetConstant0();
    token = Token::GT;
    direction = LoopBuilder::kPreDecrement;
  }

  // Default result ("not found") lives on the expression stack; match
  // branches below Drop it and Push the found index instead.
  Push(graph()->GetConstantMinus1());
  if (IsFastDoubleElementsKind(kind) || IsFastSmiElementsKind(kind)) {
    // Make sure that we can actually compare numbers correctly below, see
    // https://code.google.com/p/chromium/issues/detail?id=407946 for details.
    search_element = AddUncasted<HForceRepresentation>(
        search_element, IsFastSmiElementsKind(kind) ? Representation::Smi()
                                                    : Representation::Double());

    // Purely numeric elements: a single numeric comparison loop suffices.
    LoopBuilder loop(this, context(), direction);
    {
      HValue* index = loop.BeginBody(initial, terminating, token);
      HValue* element = AddUncasted<HLoadKeyed>(
          elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
      IfBuilder if_issame(this);
      if_issame.If<HCompareNumericAndBranch>(element, search_element,
                                             Token::EQ_STRICT);
      if_issame.Then();
      {
        Drop(1);
        Push(index);
        loop.Break();
      }
      if_issame.End();
    }
    loop.EndBody();
  } else {
    // Generic elements: dispatch on the dynamic type of the search value.
    IfBuilder if_isstring(this);
    if_isstring.If<HIsStringAndBranch>(search_element);
    if_isstring.Then();
    {
      // String search value: only string elements can strictly equal it.
      LoopBuilder loop(this, context(), direction);
      {
        HValue* index = loop.BeginBody(initial, terminating, token);
        HValue* element = AddUncasted<HLoadKeyed>(
            elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
        IfBuilder if_issame(this);
        if_issame.If<HIsStringAndBranch>(element);
        if_issame.AndIf<HStringCompareAndBranch>(
            element, search_element, Token::EQ_STRICT);
        if_issame.Then();
        {
          Drop(1);
          Push(index);
          loop.Break();
        }
        if_issame.End();
      }
      loop.EndBody();
    }
    if_isstring.Else();
    {
      IfBuilder if_isnumber(this);
      if_isnumber.If<HIsSmiAndBranch>(search_element);
      if_isnumber.OrIf<HCompareMap>(
          search_element, isolate()->factory()->heap_number_map());
      if_isnumber.Then();
      {
        // Numeric search value: compare numerically against elements that
        // are themselves smis or heap numbers.
        HValue* search_number =
            AddUncasted<HForceRepresentation>(search_element,
                                              Representation::Double());
        LoopBuilder loop(this, context(), direction);
        {
          HValue* index = loop.BeginBody(initial, terminating, token);
          HValue* element = AddUncasted<HLoadKeyed>(
              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);

          IfBuilder if_element_isnumber(this);
          if_element_isnumber.If<HIsSmiAndBranch>(element);
          if_element_isnumber.OrIf<HCompareMap>(
              element, isolate()->factory()->heap_number_map());
          if_element_isnumber.Then();
          {
            HValue* number =
                AddUncasted<HForceRepresentation>(element,
                                                  Representation::Double());
            IfBuilder if_issame(this);
            if_issame.If<HCompareNumericAndBranch>(
                number, search_number, Token::EQ_STRICT);
            if_issame.Then();
            {
              Drop(1);
              Push(index);
              loop.Break();
            }
            if_issame.End();
          }
          if_element_isnumber.End();
        }
        loop.EndBody();
      }
      if_isnumber.Else();
      {
        // Any other search value (object, undefined, ...): strict equality
        // reduces to reference identity.
        LoopBuilder loop(this, context(), direction);
        {
          HValue* index = loop.BeginBody(initial, terminating, token);
          HValue* element = AddUncasted<HLoadKeyed>(
              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
          IfBuilder if_issame(this);
          if_issame.If<HCompareObjectEqAndBranch>(
              element, search_element);
          if_issame.Then();
          {
            Drop(1);
            Push(index);
            loop.Break();
          }
          if_issame.End();
        }
        loop.EndBody();
      }
      if_isnumber.End();
    }
    if_isstring.End();
  }

  return Pop();
}
9602
9603
9604bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
9605 if (!array_function().is_identical_to(expr->target())) {
9606 return false;
9607 }
9608
9609 Handle<AllocationSite> site = expr->allocation_site();
9610 if (site.is_null()) return false;
9611
9612 BuildArrayCall(expr,
9613 expr->arguments()->length(),
9614 function,
9615 site);
9616 return true;
9617}
9618
9619
9620bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
9621 HValue* function) {
9622 if (!array_function().is_identical_to(expr->target())) {
9623 return false;
9624 }
9625
9626 Handle<AllocationSite> site = expr->allocation_site();
9627 if (site.is_null()) return false;
9628
9629 BuildArrayCall(expr, expr->arguments()->length(), function, site);
9630 return true;
9631}
9632
9633
9634bool HOptimizedGraphBuilder::CanBeFunctionApplyArguments(Call* expr) {
9635 ZoneList<Expression*>* args = expr->arguments();
9636 if (args->length() != 2) return false;
9637 VariableProxy* arg_two = args->at(1)->AsVariableProxy();
9638 if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
9639 HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
9640 if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
9641 return true;
9642}
9643
9644
// Visitor for call expressions. Dispatches on the callee's shape:
// property calls (o.f(...) / o[k](...)) vs plain calls (f(...)), and within
// each, constant-known targets (enabling builtin/API/normal inlining) vs
// unknown targets (generic HCallFunction). Throughout, the function is kept
// under the receiver on the expression stack to mirror the unoptimized
// frame layout, and is dropped at the end.
void HOptimizedGraphBuilder::VisitCall(Call* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  Expression* callee = expr->expression();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  HInstruction* call = NULL;

  Property* prop = callee->AsProperty();
  if (prop != NULL) {
    // Method call: o.f(...) or o[k](...).
    CHECK_ALIVE(VisitForValue(prop->obj()));
    HValue* receiver = Top();

    SmallMapList* maps;
    ComputeReceiverTypes(expr, receiver, &maps, zone());

    if (prop->key()->IsPropertyName() && maps->length() > 0) {
      Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
      PropertyAccessInfo info(this, LOAD, maps->first(), name);
      if (!info.CanAccessAsMonomorphic(maps)) {
        // Multiple incompatible receiver maps: polymorphic dispatch.
        HandlePolymorphicCallNamed(expr, receiver, maps, name);
        return;
      }
    }
    HValue* key = NULL;
    if (!prop->key()->IsPropertyName()) {
      CHECK_ALIVE(VisitForValue(prop->key()));
      key = Pop();
    }

    // Load the function property; its value ends up on top of the stack.
    CHECK_ALIVE(PushLoad(prop, receiver, key));
    HValue* function = Pop();

    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      // Known constant callee: try the inlining ladder.
      // Push the function under the receiver.
      environment()->SetExpressionStackAt(0, function);
      Push(receiver);

      Handle<JSFunction> known_function = Handle<JSFunction>::cast(
          HConstant::cast(function)->handle(isolate()));
      expr->set_target(known_function);

      // Function.prototype.call/apply get special lowering.
      if (TryIndirectCall(expr)) return;
      CHECK_ALIVE(VisitExpressions(expr->arguments()));

      Handle<Map> map = maps->length() == 1 ? maps->first() : Handle<Map>();
      if (TryInlineBuiltinMethodCall(expr, known_function, map,
                                     expr->arguments()->length())) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          known_function->ShortPrint();
          PrintF("\n");
        }
        return;
      }
      if (TryInlineApiMethodCall(expr, receiver, maps)) return;

      // Wrap the receiver if necessary.
      if (NeedsWrapping(maps->first(), known_function)) {
        // Since HWrapReceiver currently cannot actually wrap numbers and
        // strings, use the regular CallFunctionStub for method calls to wrap
        // the receiver.
        // TODO(verwaest): Support creation of value wrappers directly in
        // HWrapReceiver.
        call = New<HCallFunction>(function, argument_count,
                                  ConvertReceiverMode::kNotNullOrUndefined);
      } else if (TryInlineCall(expr)) {
        return;
      } else {
        call = BuildCallConstantFunction(known_function, argument_count);
      }

    } else {
      // Unknown callee: generic call. If this looks like f.apply(r, args)
      // at an uninitialized site, deopt eagerly to collect feedback.
      ArgumentsAllowedFlag arguments_flag = ARGUMENTS_NOT_ALLOWED;
      if (CanBeFunctionApplyArguments(expr) && expr->is_uninitialized()) {
        // We have to use EAGER deoptimization here because Deoptimizer::SOFT
        // gets ignored by the always-opt flag, which leads to incorrect code.
        Add<HDeoptimize>(
            Deoptimizer::kInsufficientTypeFeedbackForCallWithArguments,
            Deoptimizer::EAGER);
        arguments_flag = ARGUMENTS_FAKED;
      }

      // Push the function under the receiver.
      environment()->SetExpressionStackAt(0, function);
      Push(receiver);

      CHECK_ALIVE(VisitExpressions(expr->arguments(), arguments_flag));
      call = New<HCallFunction>(function, argument_count,
                                ConvertReceiverMode::kNotNullOrUndefined);
    }
    PushArgumentsFromEnvironment(argument_count);

  } else {
    // Plain call: f(...).
    VariableProxy* proxy = expr->expression()->AsVariableProxy();
    if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
      return Bailout(kPossibleDirectCallToEval);
    }

    // The function is on the stack in the unoptimized code during
    // evaluation of the arguments.
    CHECK_ALIVE(VisitForValue(expr->expression()));
    HValue* function = Top();
    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      Handle<Object> constant = HConstant::cast(function)->handle(isolate());
      Handle<JSFunction> target = Handle<JSFunction>::cast(constant);
      expr->SetKnownGlobalTarget(target);
    }

    // Placeholder for the receiver.
    Push(graph()->GetConstantUndefined());
    CHECK_ALIVE(VisitExpressions(expr->arguments()));

    if (expr->IsMonomorphic() &&
        !IsClassConstructor(expr->target()->shared()->kind())) {
      // Deopt if the callee changes from the observed target.
      Add<HCheckValue>(function, expr->target());

      // Patch the global object on the stack by the expected receiver.
      HValue* receiver = ImplicitReceiverFor(function, expr->target());
      const int receiver_index = argument_count - 1;
      environment()->SetExpressionStackAt(receiver_index, receiver);

      if (TryInlineBuiltinFunctionCall(expr)) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          expr->target()->ShortPrint();
          PrintF("\n");
        }
        return;
      }
      if (TryInlineApiFunctionCall(expr, receiver)) return;
      if (TryHandleArrayCall(expr, function)) return;
      if (TryInlineCall(expr)) return;

      PushArgumentsFromEnvironment(argument_count);
      call = BuildCallConstantFunction(expr->target(), argument_count);
    } else {
      PushArgumentsFromEnvironment(argument_count);
      HCallFunction* call_function = New<HCallFunction>(
          function, argument_count, ConvertReceiverMode::kNullOrUndefined);
      call = call_function;
      if (expr->is_uninitialized() &&
          expr->IsUsingCallFeedbackICSlot(isolate())) {
        // We've never seen this call before, so let's have Crankshaft learn
        // through the type vector.
        Handle<TypeFeedbackVector> vector =
            handle(current_feedback_vector(), isolate());
        FeedbackVectorSlot slot = expr->CallFeedbackICSlot();
        call_function->SetVectorAndSlot(vector, slot);
      }
    }
  }

  Drop(1);  // Drop the function.
  return ast_context()->ReturnInstruction(call, expr->id());
}
9804
9805
// Fully inlines [new] Array() / [new] Array(n) (at most one argument, as
// guaranteed by IsCallArrayInlineable), allocating the array directly with
// the elements kind recorded on the allocation site.
void HOptimizedGraphBuilder::BuildInlinedCallArray(
    Expression* expression,
    int argument_count,
    Handle<AllocationSite> site) {
  DCHECK(!site.is_null());
  DCHECK(argument_count >= 0 && argument_count <= 1);
  NoObservableSideEffectsScope no_effects(this);

  // We should at least have the constructor on the expression stack.
  HValue* constructor = environment()->ExpressionStackAt(argument_count);

  // Register on the site for deoptimization if the transition feedback changes.
  top_info()->dependencies()->AssumeTransitionStable(site);
  ElementsKind kind = site->GetElementsKind();
  HInstruction* site_instruction = Add<HConstant>(site);

  // In the single constant argument case, we may have to adjust elements kind
  // to avoid creating a packed non-empty array.
  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
    HValue* argument = environment()->Top();
    if (argument->IsConstant()) {
      HConstant* constant_argument = HConstant::cast(argument);
      DCHECK(constant_argument->HasSmiValue());
      int constant_array_size = constant_argument->Integer32Value();
      if (constant_array_size != 0) {
        // Array(n) with n > 0 starts out with n holes.
        kind = GetHoleyElementsKind(kind);
      }
    }
  }

  // Build the array.
  JSArrayBuilder array_builder(this,
                               kind,
                               site_instruction,
                               constructor,
                               DISABLE_ALLOCATION_SITES);
  HValue* new_object = argument_count == 0
      ? array_builder.AllocateEmptyArray()
      : BuildAllocateArrayFromLength(&array_builder, Top());

  // Plain calls additionally carry a receiver slot; drop args + function
  // (+ receiver).
  int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
  Drop(args_to_drop);
  ast_context()->ReturnValue(new_object);
}
9850
9851
9852// Checks whether allocation using the given constructor can be inlined.
9853static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
9854 return constructor->has_initial_map() &&
9855 !IsClassConstructor(constructor->shared()->kind()) &&
9856 constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
9857 constructor->initial_map()->instance_size() <
9858 HAllocate::kMaxInlineSize;
9859}
9860
9861
// Decides whether a call to the Array function can be fully inlined:
// zero arguments always qualify; a single argument only if it is a
// constant smi length within the loop-unrolling threshold. Every decision
// (positive or negative) is reported through TraceInline.
bool HOptimizedGraphBuilder::IsCallArrayInlineable(
    int argument_count,
    Handle<AllocationSite> site) {
  Handle<JSFunction> caller = current_info()->closure();
  Handle<JSFunction> target = array_function();
  // We should have the function plus array arguments on the environment stack.
  DCHECK(environment()->length() >= (argument_count + 1));
  DCHECK(!site.is_null());

  bool inline_ok = false;
  if (site->CanInlineCall()) {
    // We also want to avoid inlining in certain 1 argument scenarios.
    if (argument_count == 1) {
      HValue* argument = Top();
      if (argument->IsConstant()) {
        // Do not inline if the constant length argument is not a smi or
        // outside the valid range for unrolled loop initialization.
        HConstant* constant_argument = HConstant::cast(argument);
        if (constant_argument->HasSmiValue()) {
          int value = constant_argument->Integer32Value();
          inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
          if (!inline_ok) {
            TraceInline(target, caller,
                        "Constant length outside of valid inlining range.");
          }
        }
      } else {
        TraceInline(target, caller,
                    "Dont inline [new] Array(n) where n isn't constant.");
      }
    } else if (argument_count == 0) {
      inline_ok = true;
    } else {
      TraceInline(target, caller, "Too many arguments to inline.");
    }
  } else {
    TraceInline(target, caller, "AllocationSite requested no inlining.");
  }

  if (inline_ok) {
    // NULL reason means "inlining succeeded" to TraceInline.
    TraceInline(target, caller, NULL);
  }
  return inline_ok;
}
9906
9907
// Visits a 'new' expression. When the target is a known, monomorphic,
// inlineable constructor, the receiver allocation is emitted inline and
// construction is attempted via TryInlineConstruct; otherwise (or when
// inlining fails) a generic Construct stub call is emitted.
void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
  Factory* factory = isolate()->factory();

  // The constructor function is on the stack in the unoptimized code
  // during evaluation of the arguments.
  CHECK_ALIVE(VisitForValue(expr->expression()));
  HValue* function = Top();
  CHECK_ALIVE(VisitExpressions(expr->arguments()));

  // If the callee is a known JSFunction constant, record it so that
  // expr->IsMonomorphic()/expr->target() below can use it.
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<Object> constant = HConstant::cast(function)->handle(isolate());
    expr->SetKnownGlobalTarget(Handle<JSFunction>::cast(constant));
  }

  if (FLAG_inline_construct &&
      expr->IsMonomorphic() &&
      IsAllocationInlineable(expr->target())) {
    Handle<JSFunction> constructor = expr->target();
    DCHECK(
        constructor->shared()->construct_stub() ==
        isolate()->builtins()->builtin(Builtins::kJSConstructStubGeneric) ||
        constructor->shared()->construct_stub() ==
        isolate()->builtins()->builtin(Builtins::kJSConstructStubApi));
    // Guard the specialization on the actual callee; this check also serves
    // as the anchor for the instruction-deletion loop below if inlining
    // later fails.
    HValue* check = Add<HCheckValue>(function, constructor);

    // Force completion of inobject slack tracking before generating
    // allocation code to finalize instance size.
    constructor->CompleteInobjectSlackTrackingIfActive();

    // Calculate instance size from initial map of constructor.
    DCHECK(constructor->has_initial_map());
    Handle<Map> initial_map(constructor->initial_map());
    int instance_size = initial_map->instance_size();

    // Allocate an instance of the implicit receiver object.
    HValue* size_in_bytes = Add<HConstant>(instance_size);
    HAllocationMode allocation_mode;
    HAllocate* receiver = BuildAllocate(
        size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
    receiver->set_known_initial_map(initial_map);

    // Initialize map and fields of the newly allocated object.
    { NoObservableSideEffectsScope no_effects(this);
      DCHECK(initial_map->instance_type() == JS_OBJECT_TYPE);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
          Add<HConstant>(initial_map));
      HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kPropertiesOffset),
          empty_fixed_array);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForMapAndOffset(initial_map,
                                         JSObject::kElementsOffset),
          empty_fixed_array);
      BuildInitializeInobjectProperties(receiver, initial_map);
    }

    // Replace the constructor function with a newly allocated receiver using
    // the index of the receiver from the top of the expression stack.
    const int receiver_index = argument_count - 1;
    DCHECK(environment()->ExpressionStackAt(receiver_index) == function);
    environment()->SetExpressionStackAt(receiver_index, receiver);

    if (TryInlineConstruct(expr, receiver)) {
      // Inlining worked, add a dependency on the initial map to make sure that
      // this code is deoptimized whenever the initial map of the constructor
      // changes.
      top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
      return;
    }

    // TODO(mstarzinger): For now we remove the previous HAllocate and all
    // corresponding instructions and instead add HPushArguments for the
    // arguments in case inlining failed. What we actually should do is for
    // inlining to try to build a subgraph without mutating the parent graph.
    // Walk backwards from the last emitted instruction to (but excluding)
    // the HCheckValue anchor, deleting everything emitted for the attempt.
    HInstruction* instr = current_block()->last();
    do {
      HInstruction* prev_instr = instr->previous();
      instr->DeleteAndReplaceWith(NULL);
      instr = prev_instr;
    } while (instr != check);
    // Restore the constructor function in the receiver slot before falling
    // through to the generic construct call below.
    environment()->SetExpressionStackAt(receiver_index, function);
  } else {
    // The constructor function is both an operand to the instruction and an
    // argument to the construct call.
    if (TryHandleArrayCallNew(expr, function)) return;
  }

  // Generic path: call the Construct stub with the constructor doubling as
  // both target and new.target.
  HValue* arity = Add<HConstant>(argument_count - 1);
  HValue* op_vals[] = {context(), function, function, arity};
  Callable callable = CodeFactory::Construct(isolate());
  HConstant* stub = Add<HConstant>(callable.code());
  PushArgumentsFromEnvironment(argument_count);
  HInstruction* construct =
      New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
                               Vector<HValue*>(op_vals, arraysize(op_vals)));
  return ast_context()->ReturnInstruction(construct, expr->id());
}
10014
10015
10016void HOptimizedGraphBuilder::BuildInitializeInobjectProperties(
10017 HValue* receiver, Handle<Map> initial_map) {
10018 if (initial_map->GetInObjectProperties() != 0) {
10019 HConstant* undefined = graph()->GetConstantUndefined();
10020 for (int i = 0; i < initial_map->GetInObjectProperties(); i++) {
10021 int property_offset = initial_map->GetInObjectPropertyOffset(i);
10022 Add<HStoreNamedField>(receiver, HObjectAccess::ForMapAndOffset(
10023 initial_map, property_offset),
10024 undefined);
10025 }
10026 }
10027}
10028
10029
// Allocates and fully initializes a JSArrayBuffer with no backing store
// (backing store pointer zeroed, IsExternal and IsNeuterable flags set),
// returning the new object. The field stores are emitted in a fixed order
// so the object is never observed half-initialized by the GC.
HValue* HGraphBuilder::BuildAllocateEmptyArrayBuffer(HValue* byte_length) {
  // We HForceRepresentation here to avoid allocations during an *-to-tagged
  // HChange that could cause GC while the array buffer object is not fully
  // initialized.
  HObjectAccess byte_length_access(HObjectAccess::ForJSArrayBufferByteLength());
  byte_length = AddUncasted<HForceRepresentation>(
      byte_length, byte_length_access.representation());
  HAllocate* result =
      BuildAllocate(Add<HConstant>(JSArrayBuffer::kSizeWithInternalFields),
                    HType::JSObject(), JS_ARRAY_BUFFER_TYPE, HAllocationMode());

  // Install the ArrayBuffer map taken from the native context.
  HValue* native_context = BuildGetNativeContext();
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, nullptr,
          HObjectAccess::ForContextSlot(Context::ARRAY_BUFFER_MAP_INDEX)));

  // Properties and elements both start out as the empty fixed array.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  // No backing store: write a Smi zero into the backing store slot.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayBufferBackingStore().WithRepresentation(
                  Representation::Smi()),
      graph()->GetConstant0());
  Add<HStoreNamedField>(result, byte_length_access, byte_length);
  Add<HStoreNamedField>(result, HObjectAccess::ForJSArrayBufferBitFieldSlot(),
                        graph()->GetConstant0());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayBufferBitField(),
      Add<HConstant>((1 << JSArrayBuffer::IsExternal::kShift) |
                     (1 << JSArrayBuffer::IsNeuterable::kShift)));

  // Zero out the embedder (internal) fields that follow the fixed header.
  for (int field = 0; field < v8::ArrayBuffer::kInternalFieldCount; ++field) {
    Add<HStoreNamedField>(
        result,
        HObjectAccess::ForObservableJSObjectOffset(
            JSArrayBuffer::kSize + field * kPointerSize, Representation::Smi()),
        graph()->GetConstant0());
  }

  return result;
}
10078
10079
10080template <class ViewClass>
10081void HGraphBuilder::BuildArrayBufferViewInitialization(
10082 HValue* obj,
10083 HValue* buffer,
10084 HValue* byte_offset,
10085 HValue* byte_length) {
10086
10087 for (int offset = ViewClass::kSize;
10088 offset < ViewClass::kSizeWithInternalFields;
10089 offset += kPointerSize) {
10090 Add<HStoreNamedField>(obj,
10091 HObjectAccess::ForObservableJSObjectOffset(offset),
10092 graph()->GetConstant0());
10093 }
10094
10095 Add<HStoreNamedField>(
10096 obj,
10097 HObjectAccess::ForJSArrayBufferViewByteOffset(),
10098 byte_offset);
10099 Add<HStoreNamedField>(
10100 obj,
10101 HObjectAccess::ForJSArrayBufferViewByteLength(),
10102 byte_length);
10103 Add<HStoreNamedField>(obj, HObjectAccess::ForJSArrayBufferViewBuffer(),
10104 buffer);
10105}
10106
10107
10108void HOptimizedGraphBuilder::GenerateDataViewInitialize(
10109 CallRuntime* expr) {
10110 ZoneList<Expression*>* arguments = expr->arguments();
10111
10112 DCHECK(arguments->length()== 4);
10113 CHECK_ALIVE(VisitForValue(arguments->at(0)));
10114 HValue* obj = Pop();
10115
10116 CHECK_ALIVE(VisitForValue(arguments->at(1)));
10117 HValue* buffer = Pop();
10118
10119 CHECK_ALIVE(VisitForValue(arguments->at(2)));
10120 HValue* byte_offset = Pop();
10121
10122 CHECK_ALIVE(VisitForValue(arguments->at(3)));
10123 HValue* byte_length = Pop();
10124
10125 {
10126 NoObservableSideEffectsScope scope(this);
10127 BuildArrayBufferViewInitialization<JSDataView>(
10128 obj, buffer, byte_offset, byte_length);
10129 }
10130}
10131
10132
// Allocates the elements object for a typed array whose data lives in an
// external (off-heap) ArrayBuffer backing store. Only the fixed header is
// allocated on the heap; the external pointer is set to the backing store
// plus |byte_offset|. Returns the new elements object.
HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
    ExternalArrayType array_type,
    bool is_zero_byte_offset,
    HValue* buffer, HValue* byte_offset, HValue* length) {
  Handle<Map> external_array_map(
      isolate()->heap()->MapForFixedTypedArray(array_type));

  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
  HValue* elements = Add<HAllocate>(
      Add<HConstant>(FixedTypedArrayBase::kHeaderSize), HType::HeapObject(),
      NOT_TENURED, external_array_map->instance_type());

  AddStoreMapConstant(elements, external_array_map);
  Add<HStoreNamedField>(elements,
      HObjectAccess::ForFixedArrayLength(), length);

  HValue* backing_store = Add<HLoadNamedField>(
      buffer, nullptr, HObjectAccess::ForJSArrayBufferBackingStore());

  // Compute the start address of the view's data inside the backing store.
  HValue* typed_array_start;
  if (is_zero_byte_offset) {
    typed_array_start = backing_store;
  } else {
    HInstruction* external_pointer =
        AddUncasted<HAdd>(backing_store, byte_offset);
    // Arguments are checked prior to call to TypedArrayInitialize,
    // including byte_offset.
    external_pointer->ClearFlag(HValue::kCanOverflow);
    typed_array_start = external_pointer;
  }

  // External data: base pointer is zero, external pointer is absolute.
  Add<HStoreNamedField>(elements,
                        HObjectAccess::ForFixedTypedArrayBaseBasePointer(),
                        graph()->GetConstant0());
  Add<HStoreNamedField>(elements,
                        HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
                        typed_array_start);

  return elements;
}
10175
10176
// Allocates an on-heap FixedTypedArray (header plus |byte_length| bytes of
// element storage) and optionally zero-fills the elements. Returns the new
// elements object.
HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
    ExternalArrayType array_type, size_t element_size,
    ElementsKind fixed_elements_kind, HValue* byte_length, HValue* length,
    bool initialize) {
  STATIC_ASSERT(
      (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
  HValue* total_size;

  // if fixed array's elements are not aligned to object's alignment,
  // we need to align the whole array to object alignment.
  if (element_size % kObjectAlignment != 0) {
    total_size = BuildObjectSizeAlignment(
        byte_length, FixedTypedArrayBase::kHeaderSize);
  } else {
    total_size = AddUncasted<HAdd>(byte_length,
        Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
    total_size->ClearFlag(HValue::kCanOverflow);
  }

  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
  Handle<Map> fixed_typed_array_map(
      isolate()->heap()->MapForFixedTypedArray(array_type));
  HAllocate* elements =
      Add<HAllocate>(total_size, HType::HeapObject(), NOT_TENURED,
                     fixed_typed_array_map->instance_type());

#ifndef V8_HOST_ARCH_64_BIT
  // On 32-bit hosts, float64 element data must be double-aligned.
  if (array_type == kExternalFloat64Array) {
    elements->MakeDoubleAligned();
  }
#endif

  AddStoreMapConstant(elements, fixed_typed_array_map);

  Add<HStoreNamedField>(elements,
                        HObjectAccess::ForFixedArrayLength(),
                        length);
  // On-heap data: the base pointer is the array itself and the external
  // pointer holds the constant offset of the data from that base.
  Add<HStoreNamedField>(
      elements, HObjectAccess::ForFixedTypedArrayBaseBasePointer(), elements);

  Add<HStoreNamedField>(
      elements, HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
      Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()));

  HValue* filler = Add<HConstant>(static_cast<int32_t>(0));

  if (initialize) {
    // Zero-fill the element storage with a keyed-store loop over [0, length).
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

    HValue* backing_store = AddUncasted<HAdd>(
        Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()),
        elements, Strength::WEAK, AddOfExternalAndTagged);

    HValue* key = builder.BeginBody(
        Add<HConstant>(static_cast<int32_t>(0)),
        length, Token::LT);
    Add<HStoreKeyed>(backing_store, key, filler, elements, fixed_elements_kind);

    builder.EndBody();
  }
  return elements;
}
10241
10242
// %_TypedArrayInitialize(obj, array_id, buffer, byte_offset, byte_length,
// initialize): initializes a typed array over either an existing buffer or
// a freshly allocated empty one. A fast inline path is generated for the
// case where byte_offset is a Smi; otherwise the generic runtime function
// is called.
void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
    CallRuntime* expr) {
  ZoneList<Expression*>* arguments = expr->arguments();

  static const int kObjectArg = 0;
  static const int kArrayIdArg = 1;
  static const int kBufferArg = 2;
  static const int kByteOffsetArg = 3;
  static const int kByteLengthArg = 4;
  static const int kInitializeArg = 5;
  static const int kArgsLength = 6;
  DCHECK(arguments->length() == kArgsLength);


  CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
  HValue* obj = Pop();

  // The array id must be a Smi literal known at compile time, since it
  // selects the element type below.
  if (!arguments->at(kArrayIdArg)->IsLiteral()) {
    // This should never happen in real use, but can happen when fuzzing.
    // Just bail out.
    Bailout(kNeedSmiLiteral);
    return;
  }
  Handle<Object> value =
      static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
  if (!value->IsSmi()) {
    // This should never happen in real use, but can happen when fuzzing.
    // Just bail out.
    Bailout(kNeedSmiLiteral);
    return;
  }
  int array_id = Smi::cast(*value)->value();

  // A null buffer literal means the typed array owns its (on-heap) storage.
  HValue* buffer;
  if (!arguments->at(kBufferArg)->IsNullLiteral()) {
    CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
    buffer = Pop();
  } else {
    buffer = NULL;
  }

  HValue* byte_offset;
  bool is_zero_byte_offset;

  // Recognize a literal zero byte offset at compile time; otherwise the
  // offset is evaluated and must come with an explicit buffer.
  if (arguments->at(kByteOffsetArg)->IsLiteral()
      && Smi::FromInt(0) ==
      *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
    byte_offset = Add<HConstant>(static_cast<int32_t>(0));
    is_zero_byte_offset = true;
  } else {
    CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
    byte_offset = Pop();
    is_zero_byte_offset = false;
    DCHECK(buffer != NULL);
  }

  CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
  HValue* byte_length = Pop();

  CHECK(arguments->at(kInitializeArg)->IsLiteral());
  bool initialize = static_cast<Literal*>(arguments->at(kInitializeArg))
                        ->value()
                        ->BooleanValue();

  NoObservableSideEffectsScope scope(this);
  IfBuilder byte_offset_smi(this);

  // When the byte offset is not statically zero, only take the inline path
  // if it turns out to be a Smi at runtime.
  if (!is_zero_byte_offset) {
    byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
    byte_offset_smi.Then();
  }

  ExternalArrayType array_type =
      kExternalInt8Array;  // Bogus initialization.
  size_t element_size = 1;  // Bogus initialization.
  ElementsKind fixed_elements_kind =  // Bogus initialization.
      INT8_ELEMENTS;
  Runtime::ArrayIdToTypeAndSize(array_id,
      &array_type,
      &fixed_elements_kind,
      &element_size);


  { // byte_offset is Smi.
    HValue* allocated_buffer = buffer;
    if (buffer == NULL) {
      allocated_buffer = BuildAllocateEmptyArrayBuffer(byte_length);
    }
    BuildArrayBufferViewInitialization<JSTypedArray>(obj, allocated_buffer,
                                                     byte_offset, byte_length);


    // Element count = byte length / element size.
    HInstruction* length = AddUncasted<HDiv>(byte_length,
        Add<HConstant>(static_cast<int32_t>(element_size)));

    Add<HStoreNamedField>(obj,
        HObjectAccess::ForJSTypedArrayLength(),
        length);

    // External elements view an existing buffer; otherwise the storage is
    // allocated on the heap.
    HValue* elements;
    if (buffer != NULL) {
      elements = BuildAllocateExternalElements(
          array_type, is_zero_byte_offset, buffer, byte_offset, length);
    } else {
      DCHECK(is_zero_byte_offset);
      elements = BuildAllocateFixedTypedArray(array_type, element_size,
                                              fixed_elements_kind, byte_length,
                                              length, initialize);
    }
    Add<HStoreNamedField>(
        obj, HObjectAccess::ForElementsPointer(), elements);
  }

  if (!is_zero_byte_offset) {
    byte_offset_smi.Else();
    { // byte_offset is not Smi.
      // Slow path: re-push all six arguments and call the runtime function.
      Push(obj);
      CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
      Push(buffer);
      Push(byte_offset);
      Push(byte_length);
      CHECK_ALIVE(VisitForValue(arguments->at(kInitializeArg)));
      PushArgumentsFromEnvironment(kArgsLength);
      Add<HCallRuntime>(expr->function(), kArgsLength);
    }
  }
  byte_offset_smi.End();
}
10371
10372
10373void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
10374 DCHECK(expr->arguments()->length() == 0);
10375 HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
10376 return ast_context()->ReturnInstruction(max_smi, expr->id());
10377}
10378
10379
10380void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
10381 CallRuntime* expr) {
10382 DCHECK(expr->arguments()->length() == 0);
10383 HConstant* result = New<HConstant>(static_cast<int32_t>(
10384 FLAG_typed_array_max_size_in_heap));
10385 return ast_context()->ReturnInstruction(result, expr->id());
10386}
10387
10388
10389void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
10390 CallRuntime* expr) {
10391 DCHECK(expr->arguments()->length() == 1);
10392 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10393 HValue* buffer = Pop();
10394 HInstruction* result = New<HLoadNamedField>(
10395 buffer, nullptr, HObjectAccess::ForJSArrayBufferByteLength());
10396 return ast_context()->ReturnInstruction(result, expr->id());
10397}
10398
10399
10400void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
10401 CallRuntime* expr) {
10402 NoObservableSideEffectsScope scope(this);
10403 DCHECK(expr->arguments()->length() == 1);
10404 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10405 HValue* view = Pop();
10406
10407 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10408 view, nullptr,
10409 FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteLengthOffset)));
10410}
10411
10412
10413void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
10414 CallRuntime* expr) {
10415 NoObservableSideEffectsScope scope(this);
10416 DCHECK(expr->arguments()->length() == 1);
10417 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10418 HValue* view = Pop();
10419
10420 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10421 view, nullptr,
10422 FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteOffsetOffset)));
10423}
10424
10425
10426void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
10427 CallRuntime* expr) {
10428 NoObservableSideEffectsScope scope(this);
10429 DCHECK(expr->arguments()->length() == 1);
10430 CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10431 HValue* view = Pop();
10432
10433 return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10434 view, nullptr,
10435 FieldIndex::ForInObjectOffset(JSTypedArray::kLengthOffset)));
10436}
10437
10438
// Visits a runtime call. Intrinsics with a dedicated Hydrogen generator
// (FOR_EACH_HYDROGEN_INTRINSIC) are expanded inline; everything else is
// emitted as a generic HCallRuntime.
void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  // Calls into JS-implemented runtime functions are not supported here.
  if (expr->is_jsruntime()) {
    return Bailout(kCallToAJavaScriptRuntimeFunction);
  }

  const Runtime::Function* function = expr->function();
  DCHECK(function != NULL);
  switch (function->function_id) {
// Expands to one case per intrinsic, dispatching to GenerateName(expr).
#define CALL_INTRINSIC_GENERATOR(Name) \
  case Runtime::kInline##Name: \
    return Generate##Name(expr);

    FOR_EACH_HYDROGEN_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
    default: {
      // No special-case generator: evaluate the arguments and emit a plain
      // runtime call.
      int argument_count = expr->arguments()->length();
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      PushArgumentsFromEnvironment(argument_count);
      HCallRuntime* call = New<HCallRuntime>(function, argument_count);
      return ast_context()->ReturnInstruction(call, expr->id());
    }
  }
}
10465
10466
10467void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
10468 DCHECK(!HasStackOverflow());
10469 DCHECK(current_block() != NULL);
10470 DCHECK(current_block()->HasPredecessor());
10471 switch (expr->op()) {
10472 case Token::DELETE: return VisitDelete(expr);
10473 case Token::VOID: return VisitVoid(expr);
10474 case Token::TYPEOF: return VisitTypeof(expr);
10475 case Token::NOT: return VisitNot(expr);
10476 default: UNREACHABLE();
10477 }
10478}
10479
10480
// Visits a 'delete' expression. Property deletions call into the runtime;
// variable deletions either bail out or yield a constant; anything else
// evaluates for effect and yields true.
void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
  Property* prop = expr->expression()->AsProperty();
  VariableProxy* proxy = expr->expression()->AsVariableProxy();
  if (prop != NULL) {
    // delete obj[key] / delete obj.key: evaluate receiver and key, then
    // dispatch to the language-mode-specific DeleteProperty runtime entry.
    CHECK_ALIVE(VisitForValue(prop->obj()));
    CHECK_ALIVE(VisitForValue(prop->key()));
    HValue* key = Pop();
    HValue* obj = Pop();
    Add<HPushArguments>(obj, key);
    HInstruction* instr = New<HCallRuntime>(
        Runtime::FunctionForId(is_strict(function_language_mode())
                                   ? Runtime::kDeleteProperty_Strict
                                   : Runtime::kDeleteProperty_Sloppy),
        2);
    return ast_context()->ReturnInstruction(instr, expr->id());
  } else if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->IsUnallocatedOrGlobalSlot()) {
      Bailout(kDeleteWithGlobalVariable);
    } else if (var->IsStackAllocated() || var->IsContextSlot()) {
      // Result of deleting non-global variables is false. 'this' is not really
      // a variable, though we implement it as one. The subexpression does not
      // have side effects.
      HValue* value = var->HasThisName(isolate()) ? graph()->GetConstantTrue()
                                                  : graph()->GetConstantFalse();
      return ast_context()->ReturnValue(value);
    } else {
      Bailout(kDeleteWithNonGlobalVariable);
    }
  } else {
    // Result of deleting non-property, non-variable reference is true.
    // Evaluate the subexpression for side effects.
    CHECK_ALIVE(VisitForEffect(expr->expression()));
    return ast_context()->ReturnValue(graph()->GetConstantTrue());
  }
}
10517
10518
10519void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
10520 CHECK_ALIVE(VisitForEffect(expr->expression()));
10521 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
10522}
10523
10524
10525void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
10526 CHECK_ALIVE(VisitForTypeOf(expr->expression()));
10527 HValue* value = Pop();
10528 HInstruction* instr = New<HTypeof>(value);
10529 return ast_context()->ReturnInstruction(instr, expr->id());
10530}
10531
10532
// Visits a logical '!' expression. In test context the branch targets are
// simply swapped; in effect context only the operand is evaluated; in value
// context true/false constants are materialized in dedicated blocks that
// are then joined.
void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
  if (ast_context()->IsTest()) {
    // Negation in a branch just swaps the true and false successors.
    TestContext* context = TestContext::cast(ast_context());
    VisitForControl(expr->expression(),
                    context->if_false(),
                    context->if_true());
    return;
  }

  if (ast_context()->IsEffect()) {
    VisitForEffect(expr->expression());
    return;
  }

  DCHECK(ast_context()->IsValue());
  HBasicBlock* materialize_false = graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = graph()->CreateBasicBlock();
  // Note the swapped targets: the operand's true edge materializes false.
  CHECK_BAILOUT(VisitForControl(expr->expression(),
                                materialize_false,
                                materialize_true));

  if (materialize_false->HasPredecessor()) {
    materialize_false->SetJoinId(expr->MaterializeFalseId());
    set_current_block(materialize_false);
    Push(graph()->GetConstantFalse());
  } else {
    // Unreachable branch; drop the block so CreateJoin ignores it.
    materialize_false = NULL;
  }

  if (materialize_true->HasPredecessor()) {
    materialize_true->SetJoinId(expr->MaterializeTrueId());
    set_current_block(materialize_true);
    Push(graph()->GetConstantTrue());
  } else {
    materialize_true = NULL;
  }

  HBasicBlock* join =
    CreateJoin(materialize_false, materialize_true, expr->id());
  set_current_block(join);
  if (join != NULL) return ast_context()->ReturnValue(Pop());
}
10575
10576
// Maps a semantic type to the most specific Hydrogen representation that
// can hold all of its values. The checks run from most specific to most
// general type, so their order is significant.
static Representation RepresentationFor(Type* type) {
  DisallowHeapAllocation no_allocation;
  if (type->Is(Type::None())) return Representation::None();
  if (type->Is(Type::SignedSmall())) return Representation::Smi();
  if (type->Is(Type::Signed32())) return Representation::Integer32();
  if (type->Is(Type::Number())) return Representation::Double();
  return Representation::Tagged();
}
10585
10586
// Emits the add/subtract-by-one for a count operation (++/--) on the value
// at the top of the expression stack and returns the resulting instruction.
// When |returns_original_input| is set (postfix in a value context), an
// explicit ToNumber(input) value is also pushed so the caller can return it.
HInstruction* HOptimizedGraphBuilder::BuildIncrement(
    bool returns_original_input,
    CountOperation* expr) {
  // The input to the count operation is on top of the expression stack.
  Representation rep = RepresentationFor(expr->type());
  if (rep.IsNone() || rep.IsTagged()) {
    rep = Representation::Smi();
  }

  if (returns_original_input && !is_strong(function_language_mode())) {
    // We need an explicit HValue representing ToNumber(input). The
    // actual HChange instruction we need is (sometimes) added in a later
    // phase, so it is not available now to be used as an input to HAdd and
    // as the return value.
    HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
    if (!rep.IsDouble()) {
      number_input->SetFlag(HInstruction::kFlexibleRepresentation);
      number_input->SetFlag(HInstruction::kCannotBeTagged);
    }
    Push(number_input);
  }

  // The addition has no side effects, so we do not need
  // to simulate the expression stack after this instruction.
  // Any later failures deopt to the load of the input or earlier.
  HConstant* delta = (expr->op() == Token::INC)
      ? graph()->GetConstant1()
      : graph()->GetConstantMinus1();
  HInstruction* instr =
      AddUncasted<HAdd>(Top(), delta, strength(function_language_mode()));
  if (instr->IsAdd()) {
    HAdd* add = HAdd::cast(instr);
    add->set_observed_input_representation(1, rep);
    add->set_observed_input_representation(2, Representation::Smi());
  }
  // In strong mode the add may have observable effects, so a simulate is
  // kept; otherwise side effects are cleared as described above.
  if (!is_strong(function_language_mode())) {
    instr->ClearAllSideEffects();
  } else {
    Add<HSimulate>(expr->ToNumberId(), REMOVABLE_SIMULATE);
  }
  instr->SetFlag(HInstruction::kCannotBeTagged);
  return instr;
}
10630
10631
10632void HOptimizedGraphBuilder::BuildStoreForEffect(
10633 Expression* expr, Property* prop, FeedbackVectorSlot slot, BailoutId ast_id,
10634 BailoutId return_id, HValue* object, HValue* key, HValue* value) {
10635 EffectContext for_effect(this);
10636 Push(object);
10637 if (key != NULL) Push(key);
10638 Push(value);
10639 BuildStore(expr, prop, slot, ast_id, return_id);
10640}
10641
10642
// Visits a count operation (prefix/postfix ++ or --) on a variable or a
// property, storing the incremented value back and returning either the
// original numeric input (postfix) or the new value (prefix).
void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
  Expression* target = expr->expression();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  if (proxy == NULL && prop == NULL) {
    return Bailout(kInvalidLhsInCountOperation);
  }

  // Match the full code generator stack by simulating an extra stack
  // element for postfix operations in a non-effect context. The return
  // value is ToNumber(input).
  bool returns_original_input =
      expr->is_postfix() && !ast_context()->IsEffect();
  HValue* input = NULL;  // ToNumber(original_input).
  HValue* after = NULL;  // The result after incrementing or decrementing.

  if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->mode() == CONST_LEGACY) {
      return Bailout(kUnsupportedCountOperationWithConst);
    }
    if (var->mode() == CONST) {
      return Bailout(kNonInitializerAssignmentToConst);
    }
    // Argument of the count operation is a variable, not a property.
    DCHECK(prop == NULL);
    CHECK_ALIVE(VisitForValue(target));

    after = BuildIncrement(returns_original_input, expr);
    input = returns_original_input ? Top() : Pop();
    Push(after);

    // Write the incremented value back to wherever the variable lives.
    switch (var->location()) {
      case VariableLocation::GLOBAL:
      case VariableLocation::UNALLOCATED:
        HandleGlobalVariableAssignment(var, after, expr->CountSlot(),
                                       expr->AssignmentId());
        break;

      case VariableLocation::PARAMETER:
      case VariableLocation::LOCAL:
        BindIfLive(var, after);
        break;

      case VariableLocation::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object. We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots. We have no direct
          // way to detect that the variable is a parameter so we use a
          // linear search of the parameter list.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
            ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
        HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
                                                          mode, after);
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case VariableLocation::LOOKUP:
        return Bailout(kLookupVariableInCountOperation);
    }

    Drop(returns_original_input ? 2 : 1);
    return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
  }

  // Argument of the count operation is a property.
  DCHECK(prop != NULL);
  if (returns_original_input) Push(graph()->GetConstantUndefined());

  CHECK_ALIVE(VisitForValue(prop->obj()));
  HValue* object = Top();

  HValue* key = NULL;
  if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
    key = Top();
  }

  // Load the current property value on top of object (and key).
  CHECK_ALIVE(PushLoad(prop, object, key));

  after = BuildIncrement(returns_original_input, expr);

  if (returns_original_input) {
    input = Pop();
    // Drop object and key to push it again in the effect context below.
    Drop(key == NULL ? 1 : 2);
    environment()->SetExpressionStackAt(0, input);
    CHECK_ALIVE(BuildStoreForEffect(expr, prop, expr->CountSlot(), expr->id(),
                                    expr->AssignmentId(), object, key, after));
    return ast_context()->ReturnValue(Pop());
  }

  environment()->SetExpressionStackAt(0, after);
  return BuildStore(expr, prop, expr->CountSlot(), expr->id(),
                    expr->AssignmentId());
}
10757
10758
10759HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
10760 HValue* string,
10761 HValue* index) {
10762 if (string->IsConstant() && index->IsConstant()) {
10763 HConstant* c_string = HConstant::cast(string);
10764 HConstant* c_index = HConstant::cast(index);
10765 if (c_string->HasStringValue() && c_index->HasNumberValue()) {
10766 int32_t i = c_index->NumberValueAsInteger32();
10767 Handle<String> s = c_string->StringValue();
10768 if (i < 0 || i >= s->length()) {
10769 return New<HConstant>(std::numeric_limits<double>::quiet_NaN());
10770 }
10771 return New<HConstant>(s->Get(i));
10772 }
10773 }
10774 string = BuildCheckString(string);
10775 index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
10776 return New<HStringCharCodeAt>(string, index);
10777}
10778
10779
10780// Checks if the given shift amounts have following forms:
10781// (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
10782static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
10783 HValue* const32_minus_sa) {
10784 if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
10785 const HConstant* c1 = HConstant::cast(sa);
10786 const HConstant* c2 = HConstant::cast(const32_minus_sa);
10787 return c1->HasInteger32Value() && c2->HasInteger32Value() &&
10788 (c1->Integer32Value() + c2->Integer32Value() == 32);
10789 }
10790 if (!const32_minus_sa->IsSub()) return false;
10791 HSub* sub = HSub::cast(const32_minus_sa);
10792 return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
10793}
10794
10795
10796// Checks if the left and the right are shift instructions with the oposite
10797// directions that can be replaced by one rotate right instruction or not.
10798// Returns the operand and the shift amount for the rotate instruction in the
10799// former case.
10800bool HGraphBuilder::MatchRotateRight(HValue* left,
10801 HValue* right,
10802 HValue** operand,
10803 HValue** shift_amount) {
10804 HShl* shl;
10805 HShr* shr;
10806 if (left->IsShl() && right->IsShr()) {
10807 shl = HShl::cast(left);
10808 shr = HShr::cast(right);
10809 } else if (left->IsShr() && right->IsShl()) {
10810 shl = HShl::cast(right);
10811 shr = HShr::cast(left);
10812 } else {
10813 return false;
10814 }
10815 if (shl->left() != shr->left()) return false;
10816
10817 if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
10818 !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
10819 return false;
10820 }
10821 *operand = shr->left();
10822 *shift_amount = shr->right();
10823 return true;
10824}
10825
10826
10827bool CanBeZero(HValue* right) {
10828 if (right->IsConstant()) {
10829 HConstant* right_const = HConstant::cast(right);
10830 if (right_const->HasInteger32Value() &&
10831 (right_const->Integer32Value() & 0x1f) != 0) {
10832 return false;
10833 }
10834 }
10835 return true;
10836}
10837
10838
10839HValue* HGraphBuilder::EnforceNumberType(HValue* number,
10840 Type* expected) {
10841 if (expected->Is(Type::SignedSmall())) {
10842 return AddUncasted<HForceRepresentation>(number, Representation::Smi());
10843 }
10844 if (expected->Is(Type::Signed32())) {
10845 return AddUncasted<HForceRepresentation>(number,
10846 Representation::Integer32());
10847 }
10848 return number;
10849}
10850
10851
10852HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
10853 if (value->IsConstant()) {
10854 HConstant* constant = HConstant::cast(value);
10855 Maybe<HConstant*> number =
10856 constant->CopyToTruncatedNumber(isolate(), zone());
10857 if (number.IsJust()) {
10858 *expected = Type::Number(zone());
10859 return AddInstruction(number.FromJust());
10860 }
10861 }
10862
10863 // We put temporary values on the stack, which don't correspond to anything
10864 // in baseline code. Since nothing is observable we avoid recording those
10865 // pushes with a NoObservableSideEffectsScope.
10866 NoObservableSideEffectsScope no_effects(this);
10867
10868 Type* expected_type = *expected;
10869
10870 // Separate the number type from the rest.
10871 Type* expected_obj =
10872 Type::Intersect(expected_type, Type::NonNumber(zone()), zone());
10873 Type* expected_number =
10874 Type::Intersect(expected_type, Type::Number(zone()), zone());
10875
10876 // We expect to get a number.
10877 // (We need to check first, since Type::None->Is(Type::Any()) == true.
10878 if (expected_obj->Is(Type::None())) {
10879 DCHECK(!expected_number->Is(Type::None(zone())));
10880 return value;
10881 }
10882
10883 if (expected_obj->Is(Type::Undefined(zone()))) {
10884 // This is already done by HChange.
10885 *expected = Type::Union(expected_number, Type::Number(zone()), zone());
10886 return value;
10887 }
10888
10889 return value;
10890}
10891
10892
10893HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
10894 BinaryOperation* expr,
10895 HValue* left,
10896 HValue* right,
10897 PushBeforeSimulateBehavior push_sim_result) {
10898 Type* left_type = expr->left()->bounds().lower;
10899 Type* right_type = expr->right()->bounds().lower;
10900 Type* result_type = expr->bounds().lower;
10901 Maybe<int> fixed_right_arg = expr->fixed_right_arg();
10902 Handle<AllocationSite> allocation_site = expr->allocation_site();
10903
10904 HAllocationMode allocation_mode;
10905 if (FLAG_allocation_site_pretenuring && !allocation_site.is_null()) {
10906 allocation_mode = HAllocationMode(allocation_site);
10907 }
10908 HValue* result = HGraphBuilder::BuildBinaryOperation(
10909 expr->op(), left, right, left_type, right_type, result_type,
10910 fixed_right_arg, allocation_mode, strength(function_language_mode()),
10911 expr->id());
10912 // Add a simulate after instructions with observable side effects, and
10913 // after phis, which are the result of BuildBinaryOperation when we
10914 // inlined some complex subgraph.
10915 if (result->HasObservableSideEffects() || result->IsPhi()) {
10916 if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10917 Push(result);
10918 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10919 Drop(1);
10920 } else {
10921 Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10922 }
10923 }
10924 return result;
10925}
10926
10927
// Builds the graph for a binary operation |op| on |left| and |right|, driven
// by collected type feedback (|left_type|, |right_type|, |result_type|,
// |fixed_right_arg|). Handles, in order: string addition (with several
// constant fast paths), runtime-call fallbacks for stubs on non-primitive
// inputs, and the inline instruction path (including rotate-right matching
// for BIT_OR of opposite shifts). Inserts soft deopts when feedback for an
// operand is missing so more feedback can be collected.
HValue* HGraphBuilder::BuildBinaryOperation(
    Token::Value op, HValue* left, HValue* right, Type* left_type,
    Type* right_type, Type* result_type, Maybe<int> fixed_right_arg,
    HAllocationMode allocation_mode, Strength strength, BailoutId opt_id) {
  bool maybe_string_add = false;
  if (op == Token::ADD) {
    // If we are adding constant string with something for which we don't have
    // a feedback yet, assume that it's also going to be a string and don't
    // generate deopt instructions.
    if (!left_type->IsInhabited() && right->IsConstant() &&
        HConstant::cast(right)->HasStringValue()) {
      left_type = Type::String();
    }

    if (!right_type->IsInhabited() && left->IsConstant() &&
        HConstant::cast(left)->HasStringValue()) {
      right_type = Type::String();
    }

    // ADD may turn out to be a string concatenation if either side could be
    // a string or a receiver (which may convert to a string).
    maybe_string_add = (left_type->Maybe(Type::String()) ||
                        left_type->Maybe(Type::Receiver()) ||
                        right_type->Maybe(Type::String()) ||
                        right_type->Maybe(Type::Receiver()));
  }

  Representation left_rep = RepresentationFor(left_type);
  Representation right_rep = RepresentationFor(right_type);

  // No feedback for the LHS: soft-deopt to collect feedback and treat the
  // operand as fully general in the meantime.
  if (!left_type->IsInhabited()) {
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForLHSOfBinaryOperation,
        Deoptimizer::SOFT);
    left_type = Type::Any(zone());
    left_rep = RepresentationFor(left_type);
    maybe_string_add = op == Token::ADD;
  }

  // Same for the RHS.
  if (!right_type->IsInhabited()) {
    Add<HDeoptimize>(
        Deoptimizer::kInsufficientTypeFeedbackForRHSOfBinaryOperation,
        Deoptimizer::SOFT);
    right_type = Type::Any(zone());
    right_rep = RepresentationFor(right_type);
    maybe_string_add = op == Token::ADD;
  }

  // Only truncate to numbers when this cannot be a string concatenation
  // (and not in strong mode, where implicit conversions are restricted).
  if (!maybe_string_add && !is_strong(strength)) {
    left = TruncateToNumber(left, &left_type);
    right = TruncateToNumber(right, &right_type);
  }

  // Special case for string addition here.
  if (op == Token::ADD &&
      (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
    if (is_strong(strength)) {
      // In strong mode, if the one side of an addition is a string,
      // the other side must be a string too.
      left = BuildCheckString(left);
      right = BuildCheckString(right);
    } else {
      // Validate type feedback for left argument.
      if (left_type->Is(Type::String())) {
        left = BuildCheckString(left);
      }

      // Validate type feedback for right argument.
      if (right_type->Is(Type::String())) {
        right = BuildCheckString(right);
      }

      // Convert left argument as necessary.
      if (left_type->Is(Type::Number())) {
        DCHECK(right_type->Is(Type::String()));
        left = BuildNumberToString(left, left_type);
      } else if (!left_type->Is(Type::String())) {
        DCHECK(right_type->Is(Type::String()));
        // Left side has unknown type: let the stub convert it.
        return AddUncasted<HStringAdd>(
            left, right, allocation_mode.GetPretenureMode(),
            STRING_ADD_CONVERT_LEFT, allocation_mode.feedback_site());
      }

      // Convert right argument as necessary.
      if (right_type->Is(Type::Number())) {
        DCHECK(left_type->Is(Type::String()));
        right = BuildNumberToString(right, right_type);
      } else if (!right_type->Is(Type::String())) {
        DCHECK(left_type->Is(Type::String()));
        // Right side has unknown type: let the stub convert it.
        return AddUncasted<HStringAdd>(
            left, right, allocation_mode.GetPretenureMode(),
            STRING_ADD_CONVERT_RIGHT, allocation_mode.feedback_site());
      }
    }

    // Fast paths for empty constant strings.
    Handle<String> left_string =
        left->IsConstant() && HConstant::cast(left)->HasStringValue()
            ? HConstant::cast(left)->StringValue()
            : Handle<String>();
    Handle<String> right_string =
        right->IsConstant() && HConstant::cast(right)->HasStringValue()
            ? HConstant::cast(right)->StringValue()
            : Handle<String>();
    // "" + x == x and x + "" == x; two constants always use the stub.
    if (!left_string.is_null() && left_string->length() == 0) return right;
    if (!right_string.is_null() && right_string->length() == 0) return left;
    if (!left_string.is_null() && !right_string.is_null()) {
      return AddUncasted<HStringAdd>(
          left, right, allocation_mode.GetPretenureMode(),
          STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
    }

    // Register the dependent code with the allocation site.
    if (!allocation_mode.feedback_site().is_null()) {
      DCHECK(!graph()->info()->IsStub());
      Handle<AllocationSite> site(allocation_mode.feedback_site());
      top_info()->dependencies()->AssumeTenuringDecision(site);
    }

    // Inline the string addition into the stub when creating allocation
    // mementos to gather allocation site feedback, or if we can statically
    // infer that we're going to create a cons string.
    if ((graph()->info()->IsStub() &&
         allocation_mode.CreateAllocationMementos()) ||
        (left->IsConstant() &&
         HConstant::cast(left)->HasStringValue() &&
         HConstant::cast(left)->StringValue()->length() + 1 >=
           ConsString::kMinLength) ||
        (right->IsConstant() &&
         HConstant::cast(right)->HasStringValue() &&
         HConstant::cast(right)->StringValue()->length() + 1 >=
           ConsString::kMinLength)) {
      return BuildStringAdd(left, right, allocation_mode);
    }

    // Fallback to using the string add stub.
    return AddUncasted<HStringAdd>(
        left, right, allocation_mode.GetPretenureMode(), STRING_ADD_CHECK_NONE,
        allocation_mode.feedback_site());
  }

  if (graph()->info()->IsStub()) {
    left = EnforceNumberType(left, left_type);
    right = EnforceNumberType(right, right_type);
  }

  Representation result_rep = RepresentationFor(result_type);

  bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
                          (right_rep.IsTagged() && !right_rep.IsSmi());

  HInstruction* instr = NULL;
  // Only the stub is allowed to call into the runtime, since otherwise we would
  // inline several instructions (including the two pushes) for every tagged
  // operation in optimized code, which is more expensive, than a stub call.
  if (graph()->info()->IsStub() && is_non_primitive) {
    // Map the token (and language strength) to the runtime function that
    // implements the operation.
    Runtime::FunctionId function_id;
    switch (op) {
      default:
        UNREACHABLE();
      case Token::ADD:
        function_id =
            is_strong(strength) ? Runtime::kAdd_Strong : Runtime::kAdd;
        break;
      case Token::SUB:
        function_id = is_strong(strength) ? Runtime::kSubtract_Strong
                                          : Runtime::kSubtract;
        break;
      case Token::MUL:
        function_id = is_strong(strength) ? Runtime::kMultiply_Strong
                                          : Runtime::kMultiply;
        break;
      case Token::DIV:
        function_id =
            is_strong(strength) ? Runtime::kDivide_Strong : Runtime::kDivide;
        break;
      case Token::MOD:
        function_id =
            is_strong(strength) ? Runtime::kModulus_Strong : Runtime::kModulus;
        break;
      case Token::BIT_OR:
        function_id = is_strong(strength) ? Runtime::kBitwiseOr_Strong
                                          : Runtime::kBitwiseOr;
        break;
      case Token::BIT_AND:
        function_id = is_strong(strength) ? Runtime::kBitwiseAnd_Strong
                                          : Runtime::kBitwiseAnd;
        break;
      case Token::BIT_XOR:
        function_id = is_strong(strength) ? Runtime::kBitwiseXor_Strong
                                          : Runtime::kBitwiseXor;
        break;
      case Token::SAR:
        function_id = is_strong(strength) ? Runtime::kShiftRight_Strong
                                          : Runtime::kShiftRight;
        break;
      case Token::SHR:
        function_id = is_strong(strength) ? Runtime::kShiftRightLogical_Strong
                                          : Runtime::kShiftRightLogical;
        break;
      case Token::SHL:
        function_id = is_strong(strength) ? Runtime::kShiftLeft_Strong
                                          : Runtime::kShiftLeft;
        break;
    }
    Add<HPushArguments>(left, right);
    instr = AddUncasted<HCallRuntime>(Runtime::FunctionForId(function_id), 2);
  } else {
    if (is_strong(strength) && Token::IsBitOp(op)) {
      // TODO(conradw): This is not efficient, but is necessary to prevent
      // conversion of oddball values to numbers in strong mode. It would be
      // better to prevent the conversion rather than adding a runtime check.
      IfBuilder if_builder(this);
      if_builder.If<HHasInstanceTypeAndBranch>(left, ODDBALL_TYPE);
      if_builder.OrIf<HHasInstanceTypeAndBranch>(right, ODDBALL_TYPE);
      if_builder.Then();
      Add<HCallRuntime>(
          Runtime::FunctionForId(Runtime::kThrowStrongModeImplicitConversion),
          0);
      if (!graph()->info()->IsStub()) {
        Add<HSimulate>(opt_id, REMOVABLE_SIMULATE);
      }
      if_builder.End();
    }
    switch (op) {
      case Token::ADD:
        instr = AddUncasted<HAdd>(left, right, strength);
        break;
      case Token::SUB:
        instr = AddUncasted<HSub>(left, right, strength);
        break;
      case Token::MUL:
        instr = AddUncasted<HMul>(left, right, strength);
        break;
      case Token::MOD: {
        // If feedback pinned the RHS to a single value, deopt-check for it
        // so the backend can specialize the modulus.
        if (fixed_right_arg.IsJust() &&
            !right->EqualsInteger32Constant(fixed_right_arg.FromJust())) {
          HConstant* fixed_right =
              Add<HConstant>(static_cast<int>(fixed_right_arg.FromJust()));
          IfBuilder if_same(this);
          if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
          if_same.Then();
          if_same.ElseDeopt(Deoptimizer::kUnexpectedRHSOfBinaryOperation);
          right = fixed_right;
        }
        instr = AddUncasted<HMod>(left, right, strength);
        break;
      }
      case Token::DIV:
        instr = AddUncasted<HDiv>(left, right, strength);
        break;
      case Token::BIT_XOR:
      case Token::BIT_AND:
        instr = AddUncasted<HBitwise>(op, left, right, strength);
        break;
      case Token::BIT_OR: {
        // Recognize (x << sa) | (x >>> (32 - sa)) and emit a rotate.
        HValue *operand, *shift_amount;
        if (left_type->Is(Type::Signed32()) &&
            right_type->Is(Type::Signed32()) &&
            MatchRotateRight(left, right, &operand, &shift_amount)) {
          instr = AddUncasted<HRor>(operand, shift_amount, strength);
        } else {
          instr = AddUncasted<HBitwise>(op, left, right, strength);
        }
        break;
      }
      case Token::SAR:
        instr = AddUncasted<HSar>(left, right, strength);
        break;
      case Token::SHR:
        instr = AddUncasted<HShr>(left, right, strength);
        // x >>> 0 can produce a value above kMaxInt; track it as uint32.
        if (instr->IsShr() && CanBeZero(right)) {
          graph()->RecordUint32Instruction(instr);
        }
        break;
      case Token::SHL:
        instr = AddUncasted<HShl>(left, right, strength);
        break;
      default:
        UNREACHABLE();
    }
  }

  // Attach the observed input/output representations so representation
  // inference can pick good machine representations later.
  if (instr->IsBinaryOperation()) {
    HBinaryOperation* binop = HBinaryOperation::cast(instr);
    binop->set_observed_input_representation(1, left_rep);
    binop->set_observed_input_representation(2, right_rep);
    binop->initialize_output_representation(result_rep);
    if (graph()->info()->IsStub()) {
      // Stub should not call into stub.
      instr->SetFlag(HValue::kCannotBeTagged);
      // And should truncate on HForceRepresentation already.
      if (left->IsForceRepresentation()) {
        left->CopyFlag(HValue::kTruncatingToSmi, instr);
        left->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
      if (right->IsForceRepresentation()) {
        right->CopyFlag(HValue::kTruncatingToSmi, instr);
        right->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
    }
  }
  return instr;
}
11230
11231
11232// Check for the form (%_ClassOf(foo) === 'BarClass').
11233static bool IsClassOfTest(CompareOperation* expr) {
11234 if (expr->op() != Token::EQ_STRICT) return false;
11235 CallRuntime* call = expr->left()->AsCallRuntime();
11236 if (call == NULL) return false;
11237 Literal* literal = expr->right()->AsLiteral();
11238 if (literal == NULL) return false;
11239 if (!literal->value()->IsString()) return false;
11240 if (!call->is_jsruntime() &&
11241 call->function()->function_id != Runtime::kInlineClassOf) {
11242 return false;
11243 }
11244 DCHECK(call->arguments()->length() == 1);
11245 return true;
11246}
11247
11248
11249void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
11250 DCHECK(!HasStackOverflow());
11251 DCHECK(current_block() != NULL);
11252 DCHECK(current_block()->HasPredecessor());
11253 switch (expr->op()) {
11254 case Token::COMMA:
11255 return VisitComma(expr);
11256 case Token::OR:
11257 case Token::AND:
11258 return VisitLogicalExpression(expr);
11259 default:
11260 return VisitArithmeticExpression(expr);
11261 }
11262}
11263
11264
11265void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
11266 CHECK_ALIVE(VisitForEffect(expr->left()));
11267 // Visit the right subexpression in the same AST context as the entire
11268 // expression.
11269 Visit(expr->right());
11270}
11271
11272
// Translates a short-circuiting && / || expression. The strategy depends on
// the surrounding AST context: in a test context the operands branch
// directly to the context's targets; in a value context the result of one
// operand is materialized on the expression stack; in an effect context
// only control flow and side effects matter.
void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  if (ast_context()->IsTest()) {
    TestContext* context = TestContext::cast(ast_context());
    // Translate left subexpression.
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    if (is_logical_and) {
      // For &&: a false left operand short-circuits to the false target.
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    eval_right,
                                    context->if_false()));
    } else {
      // For ||: a true left operand short-circuits to the true target.
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    context->if_true(),
                                    eval_right));
    }

    // Translate right subexpression by visiting it in the same AST
    // context as the entire expression.
    if (eval_right->HasPredecessor()) {
      eval_right->SetJoinId(expr->RightId());
      set_current_block(eval_right);
      Visit(expr->right());
    }

  } else if (ast_context()->IsValue()) {
    CHECK_ALIVE(VisitForValue(expr->left()));
    DCHECK(current_block() != NULL);
    HValue* left_value = Top();

    // Short-circuit left values that always evaluate to the same boolean value.
    if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
      // l (evals true) && r -> r
      // l (evals true) || r -> l
      // l (evals false) && r -> l
      // l (evals false) || r -> r
      if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
        // The result is the right operand; drop the left value first.
        Drop(1);
        CHECK_ALIVE(VisitForValue(expr->right()));
      }
      return ast_context()->ReturnValue(Pop());
    }

    // We need an extra block to maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    ToBooleanStub::Types expected(expr->left()->to_boolean_types());
    HBranch* test = is_logical_and
        ? New<HBranch>(left_value, expected, eval_right, empty_block)
        : New<HBranch>(left_value, expected, empty_block, eval_right);
    FinishCurrentBlock(test);

    set_current_block(eval_right);
    Drop(1);  // Value of the left subexpression.
    CHECK_BAILOUT(VisitForValue(expr->right()));

    // Join the short-circuit path (left value) with the evaluated-right path.
    HBasicBlock* join_block =
      CreateJoin(empty_block, current_block(), expr->id());
    set_current_block(join_block);
    return ast_context()->ReturnValue(Pop());

  } else {
    DCHECK(ast_context()->IsEffect());
    // In an effect context, we don't need the value of the left subexpression,
    // only its control flow and side effects.  We need an extra block to
    // maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* right_block = graph()->CreateBasicBlock();
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
    }

    // TODO(kmillikin): Find a way to fix this.  It's ugly that there are
    // actually two empty blocks (one here and one inserted by
    // TestContext::BuildBranch, and that they both have an HSimulate though the
    // second one is not a merge node, and that we really have no good AST ID to
    // put on that first HSimulate.

    // Unreachable blocks are pruned before the join.
    if (empty_block->HasPredecessor()) {
      empty_block->SetJoinId(expr->id());
    } else {
      empty_block = NULL;
    }

    if (right_block->HasPredecessor()) {
      right_block->SetJoinId(expr->RightId());
      set_current_block(right_block);
      CHECK_BAILOUT(VisitForEffect(expr->right()));
      right_block = current_block();
    } else {
      right_block = NULL;
    }

    HBasicBlock* join_block =
      CreateJoin(empty_block, right_block, expr->id());
    set_current_block(join_block);
    // We did not materialize any value in the predecessor environments,
    // so there is no need to handle it here.
  }
}
11374
11375
11376void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
11377 CHECK_ALIVE(VisitForValue(expr->left()));
11378 CHECK_ALIVE(VisitForValue(expr->right()));
11379 SetSourcePosition(expr->position());
11380 HValue* right = Pop();
11381 HValue* left = Pop();
11382 HValue* result =
11383 BuildBinaryOperation(expr, left, right,
11384 ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
11385 : PUSH_BEFORE_SIMULATE);
11386 if (top_info()->is_tracking_positions() && result->IsBinaryOperation()) {
11387 HBinaryOperation::cast(result)->SetOperandPositions(
11388 zone(),
11389 ScriptPositionToSourcePosition(expr->left()->position()),
11390 ScriptPositionToSourcePosition(expr->right()->position()));
11391 }
11392 return ast_context()->ReturnValue(result);
11393}
11394
11395
11396void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
11397 Expression* sub_expr,
11398 Handle<String> check) {
11399 CHECK_ALIVE(VisitForTypeOf(sub_expr));
11400 SetSourcePosition(expr->position());
11401 HValue* value = Pop();
11402 HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
11403 return ast_context()->ReturnControl(instr, expr->id());
11404}
11405
11406
11407static bool IsLiteralCompareBool(Isolate* isolate,
11408 HValue* left,
11409 Token::Value op,
11410 HValue* right) {
11411 return op == Token::EQ_STRICT &&
11412 ((left->IsConstant() &&
11413 HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
11414 (right->IsConstant() &&
11415 HConstant::cast(right)->handle(isolate)->IsBoolean()));
11416}
11417
11418
// Translates a comparison expression. Tries several special-case fast paths
// first (typeof checks, undefined/null literal comparisons, %_ClassOf tests,
// boolean-constant strict equality, instanceof with a known constructor,
// 'in'), and falls back to a generic compare instruction built from type
// feedback.
void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());

  // Check for a few fast cases. The AST visiting behavior must be in sync
  // with the full codegen: We don't push both left and right values onto
  // the expression stack when one side is a special-case literal.
  Expression* sub_expr = NULL;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    return HandleLiteralCompareTypeof(expr, sub_expr, check);
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
  }

  // %_ClassOf(x) === 'Literal' becomes a dedicated branch instruction.
  if (IsClassOfTest(expr)) {
    CallRuntime* call = expr->left()->AsCallRuntime();
    DCHECK(call->arguments()->length() == 1);
    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
    HValue* value = Pop();
    Literal* literal = expr->right()->AsLiteral();
    Handle<String> rhs = Handle<String>::cast(literal->value());
    HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
    return ast_context()->ReturnControl(instr, expr->id());
  }

  Type* left_type = expr->left()->bounds().lower;
  Type* right_type = expr->right()->bounds().lower;
  Type* combined_type = expr->combined_type();

  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));

  HValue* right = Pop();
  HValue* left = Pop();
  Token::Value op = expr->op();

  // Strict equality against a boolean constant reduces to object identity.
  if (IsLiteralCompareBool(isolate(), left, op, right)) {
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return ast_context()->ReturnControl(result, expr->id());
  }

  if (op == Token::INSTANCEOF) {
    // Check to see if the rhs of the instanceof is a known function.
    if (right->IsConstant() &&
        HConstant::cast(right)->handle(isolate())->IsJSFunction()) {
      Handle<JSFunction> constructor =
          Handle<JSFunction>::cast(HConstant::cast(right)->handle(isolate()));
      if (constructor->IsConstructor() &&
          !constructor->map()->has_non_instance_prototype()) {
        JSFunction::EnsureHasInitialMap(constructor);
        DCHECK(constructor->has_initial_map());
        Handle<Map> initial_map(constructor->initial_map(), isolate());
        // Deopt if the constructor's initial map (and thus prototype) changes.
        top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
        HInstruction* prototype =
            Add<HConstant>(handle(initial_map->prototype(), isolate()));
        HHasInPrototypeChainAndBranch* result =
            New<HHasInPrototypeChainAndBranch>(left, prototype);
        return ast_context()->ReturnControl(result, expr->id());
      }
    }

    // Unknown constructor: use the generic instanceof instruction.
    HInstanceOf* result = New<HInstanceOf>(left, right);
    return ast_context()->ReturnInstruction(result, expr->id());

  } else if (op == Token::IN) {
    // 'in' is implemented via the %HasProperty runtime function.
    Add<HPushArguments>(left, right);
    HInstruction* result =
        New<HCallRuntime>(Runtime::FunctionForId(Runtime::kHasProperty), 2);
    return ast_context()->ReturnInstruction(result, expr->id());
  }

  PushBeforeSimulateBehavior push_behavior =
      ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
                                : PUSH_BEFORE_SIMULATE;
  HControlInstruction* compare = BuildCompareInstruction(
      op, left, right, left_type, right_type, combined_type,
      ScriptPositionToSourcePosition(expr->left()->position()),
      ScriptPositionToSourcePosition(expr->right()->position()),
      push_behavior, expr->id());
  if (compare == NULL) return;  // Bailed out.
  return ast_context()->ReturnControl(compare, expr->id());
}
11510
11511
11512HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
11513 Token::Value op, HValue* left, HValue* right, Type* left_type,
11514 Type* right_type, Type* combined_type, SourcePosition left_position,
11515 SourcePosition right_position, PushBeforeSimulateBehavior push_sim_result,
11516 BailoutId bailout_id) {
11517 // Cases handled below depend on collected type feedback. They should
11518 // soft deoptimize when there is no type feedback.
11519 if (!combined_type->IsInhabited()) {
11520 Add<HDeoptimize>(
11521 Deoptimizer::kInsufficientTypeFeedbackForCombinedTypeOfBinaryOperation,
11522 Deoptimizer::SOFT);
11523 combined_type = left_type = right_type = Type::Any(zone());
11524 }
11525
11526 Representation left_rep = RepresentationFor(left_type);
11527 Representation right_rep = RepresentationFor(right_type);
11528 Representation combined_rep = RepresentationFor(combined_type);
11529
11530 if (combined_type->Is(Type::Receiver())) {
11531 if (Token::IsEqualityOp(op)) {
11532 // HCompareObjectEqAndBranch can only deal with object, so
11533 // exclude numbers.
11534 if ((left->IsConstant() &&
11535 HConstant::cast(left)->HasNumberValue()) ||
11536 (right->IsConstant() &&
11537 HConstant::cast(right)->HasNumberValue())) {
11538 Add<HDeoptimize>(Deoptimizer::kTypeMismatchBetweenFeedbackAndConstant,
11539 Deoptimizer::SOFT);
11540 // The caller expects a branch instruction, so make it happy.
11541 return New<HBranch>(graph()->GetConstantTrue());
11542 }
11543 // Can we get away with map check and not instance type check?
11544 HValue* operand_to_check =
11545 left->block()->block_id() < right->block()->block_id() ? left : right;
11546 if (combined_type->IsClass()) {
11547 Handle<Map> map = combined_type->AsClass()->Map();
11548 AddCheckMap(operand_to_check, map);
11549 HCompareObjectEqAndBranch* result =
11550 New<HCompareObjectEqAndBranch>(left, right);
11551 if (top_info()->is_tracking_positions()) {
11552 result->set_operand_position(zone(), 0, left_position);
11553 result->set_operand_position(zone(), 1, right_position);
11554 }
11555 return result;
11556 } else {
11557 BuildCheckHeapObject(operand_to_check);
11558 Add<HCheckInstanceType>(operand_to_check,
11559 HCheckInstanceType::IS_JS_RECEIVER);
11560 HCompareObjectEqAndBranch* result =
11561 New<HCompareObjectEqAndBranch>(left, right);
11562 return result;
11563 }
11564 } else {
11565 if (combined_type->IsClass()) {
11566 // TODO(bmeurer): This is an optimized version of an x < y, x > y,
11567 // x <= y or x >= y, where both x and y are spec objects with the
11568 // same map. The CompareIC collects this map for us. So if we know
11569 // that there's no @@toPrimitive on the map (including the prototype
11570 // chain), and both valueOf and toString are the default initial
11571 // implementations (on the %ObjectPrototype%), then we can reduce
11572 // the comparison to map checks on x and y, because the comparison
11573 // will turn into a comparison of "[object CLASS]" to itself (the
11574 // default outcome of toString, since valueOf returns a spec object).
11575 // This is pretty much adhoc, so in TurboFan we could do a lot better
11576 // and inline the interesting parts of ToPrimitive (actually we could
11577 // even do that in Crankshaft but we don't want to waste too much
11578 // time on this now).
11579 DCHECK(Token::IsOrderedRelationalCompareOp(op));
11580 Handle<Map> map = combined_type->AsClass()->Map();
11581 PropertyAccessInfo value_of(this, LOAD, map,
11582 isolate()->factory()->valueOf_string());
11583 PropertyAccessInfo to_primitive(
11584 this, LOAD, map, isolate()->factory()->to_primitive_symbol());
11585 PropertyAccessInfo to_string(this, LOAD, map,
11586 isolate()->factory()->toString_string());
11587 PropertyAccessInfo to_string_tag(
11588 this, LOAD, map, isolate()->factory()->to_string_tag_symbol());
11589 if (to_primitive.CanAccessMonomorphic() && !to_primitive.IsFound() &&
11590 to_string_tag.CanAccessMonomorphic() &&
11591 (!to_string_tag.IsFound() || to_string_tag.IsData() ||
11592 to_string_tag.IsDataConstant()) &&
11593 value_of.CanAccessMonomorphic() && value_of.IsDataConstant() &&
11594 value_of.constant().is_identical_to(isolate()->object_value_of()) &&
11595 to_string.CanAccessMonomorphic() && to_string.IsDataConstant() &&
11596 to_string.constant().is_identical_to(
11597 isolate()->object_to_string())) {
11598 // We depend on the prototype chain to stay the same, because we
11599 // also need to deoptimize when someone installs @@toPrimitive
11600 // or @@toStringTag somewhere in the prototype chain.
11601 BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
11602 Handle<JSObject>::null());
11603 AddCheckMap(left, map);
11604 AddCheckMap(right, map);
11605 // The caller expects a branch instruction, so make it happy.
11606 return New<HBranch>(
11607 graph()->GetConstantBool(op == Token::LTE || op == Token::GTE));
11608 }
11609 }
11610 Bailout(kUnsupportedNonPrimitiveCompare);
11611 return NULL;
11612 }
11613 } else if (combined_type->Is(Type::InternalizedString()) &&
11614 Token::IsEqualityOp(op)) {
11615 // If we have a constant argument, it should be consistent with the type
11616 // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
11617 if ((left->IsConstant() &&
11618 !HConstant::cast(left)->HasInternalizedStringValue()) ||
11619 (right->IsConstant() &&
11620 !HConstant::cast(right)->HasInternalizedStringValue())) {
11621 Add<HDeoptimize>(Deoptimizer::kTypeMismatchBetweenFeedbackAndConstant,
11622 Deoptimizer::SOFT);
11623 // The caller expects a branch instruction, so make it happy.
11624 return New<HBranch>(graph()->GetConstantTrue());
11625 }
11626 BuildCheckHeapObject(left);
11627 Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
11628 BuildCheckHeapObject(right);
11629 Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
11630 HCompareObjectEqAndBranch* result =
11631 New<HCompareObjectEqAndBranch>(left, right);
11632 return result;
11633 } else if (combined_type->Is(Type::String())) {
11634 BuildCheckHeapObject(left);
11635 Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
11636 BuildCheckHeapObject(right);
11637 Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
11638 HStringCompareAndBranch* result =
11639 New<HStringCompareAndBranch>(left, right, op);
11640 return result;
11641 } else if (combined_type->Is(Type::Boolean())) {
11642 AddCheckMap(left, isolate()->factory()->boolean_map());
11643 AddCheckMap(right, isolate()->factory()->boolean_map());
11644 if (Token::IsEqualityOp(op)) {
11645 HCompareObjectEqAndBranch* result =
11646 New<HCompareObjectEqAndBranch>(left, right);
11647 return result;
11648 }
11649 left = Add<HLoadNamedField>(
11650 left, nullptr,
11651 HObjectAccess::ForOddballToNumber(Representation::Smi()));
11652 right = Add<HLoadNamedField>(
11653 right, nullptr,
11654 HObjectAccess::ForOddballToNumber(Representation::Smi()));
11655 HCompareNumericAndBranch* result =
11656 New<HCompareNumericAndBranch>(left, right, op);
11657 return result;
11658 } else {
11659 if (combined_rep.IsTagged() || combined_rep.IsNone()) {
11660 HCompareGeneric* result = Add<HCompareGeneric>(
11661 left, right, op, strength(function_language_mode()));
11662 result->set_observed_input_representation(1, left_rep);
11663 result->set_observed_input_representation(2, right_rep);
11664 if (result->HasObservableSideEffects()) {
11665 if (push_sim_result == PUSH_BEFORE_SIMULATE) {
11666 Push(result);
11667 AddSimulate(bailout_id, REMOVABLE_SIMULATE);
11668 Drop(1);
11669 } else {
11670 AddSimulate(bailout_id, REMOVABLE_SIMULATE);
11671 }
11672 }
11673 // TODO(jkummerow): Can we make this more efficient?
11674 HBranch* branch = New<HBranch>(result);
11675 return branch;
11676 } else {
11677 HCompareNumericAndBranch* result = New<HCompareNumericAndBranch>(
11678 left, right, op, strength(function_language_mode()));
11679 result->set_observed_input_representation(left_rep, right_rep);
11680 if (top_info()->is_tracking_positions()) {
11681 result->SetOperandPositions(zone(), left_position, right_position);
11682 }
11683 return result;
11684 }
11685 }
11686}
11687
11688
11689void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
11690 Expression* sub_expr,
11691 NilValue nil) {
11692 DCHECK(!HasStackOverflow());
11693 DCHECK(current_block() != NULL);
11694 DCHECK(current_block()->HasPredecessor());
11695 DCHECK(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
11696 if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
11697 CHECK_ALIVE(VisitForValue(sub_expr));
11698 HValue* value = Pop();
11699 if (expr->op() == Token::EQ_STRICT) {
11700 HConstant* nil_constant = nil == kNullValue
11701 ? graph()->GetConstantNull()
11702 : graph()->GetConstantUndefined();
11703 HCompareObjectEqAndBranch* instr =
11704 New<HCompareObjectEqAndBranch>(value, nil_constant);
11705 return ast_context()->ReturnControl(instr, expr->id());
11706 } else {
11707 DCHECK_EQ(Token::EQ, expr->op());
11708 Type* type = expr->combined_type()->Is(Type::None())
11709 ? Type::Any(zone()) : expr->combined_type();
11710 HIfContinuation continuation;
11711 BuildCompareNil(value, type, &continuation);
11712 return ast_context()->ReturnContinuation(&continuation, expr->id());
11713 }
11714}
11715
11716
11717void HOptimizedGraphBuilder::VisitSpread(Spread* expr) { UNREACHABLE(); }
11718
11719
// Empty parentheses never reach the graph builder.
void HOptimizedGraphBuilder::VisitEmptyParentheses(EmptyParentheses* expr) {
  UNREACHABLE();
}
11723
11724
11725HValue* HOptimizedGraphBuilder::AddThisFunction() {
11726 return AddInstruction(BuildThisFunction());
11727}
11728
11729
11730HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
11731 // If we share optimized code between different closures, the
11732 // this-function is not a constant, except inside an inlined body.
11733 if (function_state()->outer() != NULL) {
11734 return New<HConstant>(
11735 function_state()->compilation_info()->closure());
11736 } else {
11737 return New<HThisFunction>();
11738 }
11739}
11740
11741
// Emits inline allocation and initialization of a (possibly nested) object
// or array literal from its boilerplate.  Runs under a
// NoObservableSideEffectsScope so the partially initialized object is never
// visible at a simulate.
HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
    Handle<JSObject> boilerplate_object,
    AllocationSiteUsageContext* site_context) {
  NoObservableSideEffectsScope no_effects(this);
  Handle<Map> initial_map(boilerplate_object->map());
  InstanceType instance_type = initial_map->instance_type();
  DCHECK(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);

  HType type = instance_type == JS_ARRAY_TYPE
      ? HType::JSArray() : HType::JSObject();
  HValue* object_size_constant = Add<HConstant>(initial_map->instance_size());

  // The pretenuring decision comes from the outermost allocation site.
  PretenureFlag pretenure_flag = NOT_TENURED;
  Handle<AllocationSite> top_site(*site_context->top(), isolate());
  if (FLAG_allocation_site_pretenuring) {
    pretenure_flag = top_site->GetPretenureMode();
  }

  Handle<AllocationSite> current_site(*site_context->current(), isolate());
  if (*top_site == *current_site) {
    // We install a dependency for pretenuring only on the outermost literal.
    top_info()->dependencies()->AssumeTenuringDecision(top_site);
  }
  top_info()->dependencies()->AssumeTransitionStable(current_site);

  HInstruction* object = Add<HAllocate>(
      object_size_constant, type, pretenure_flag, instance_type, top_site);

  // If allocation folding reaches Page::kMaxRegularHeapObjectSize the
  // elements array may not get folded into the object. Hence, we set the
  // elements pointer to empty fixed array and let store elimination remove
  // this store in the folding case.
  HConstant* empty_fixed_array = Add<HConstant>(
      isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);

  BuildEmitObjectHeader(boilerplate_object, object);

  // Similarly to the elements pointer, there is no guarantee that all
  // property allocations can get folded, so pre-initialize all in-object
  // properties to a safe value.
  BuildInitializeInobjectProperties(object, initial_map);

  // COW-backed elements are shared with the boilerplate, so they contribute
  // no allocation size of their own.
  Handle<FixedArrayBase> elements(boilerplate_object->elements());
  int elements_size = (elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
          elements->Size() : 0;

  if (pretenure_flag == TENURED &&
      elements->map() == isolate()->heap()->fixed_cow_array_map() &&
      isolate()->heap()->InNewSpace(*elements)) {
    // If we would like to pretenure a fixed cow array, we must ensure that the
    // array is already in old space, otherwise we'll create too many old-to-
    // new-space pointers (overflowing the store buffer).
    elements = Handle<FixedArrayBase>(
        isolate()->factory()->CopyAndTenureFixedCOWArray(
            Handle<FixedArray>::cast(elements)));
    boilerplate_object->set_elements(*elements);
  }

  HInstruction* object_elements = NULL;
  if (elements_size > 0) {
    // Allocate a fresh backing store and copy the boilerplate's elements.
    HValue* object_elements_size = Add<HConstant>(elements_size);
    InstanceType instance_type = boilerplate_object->HasFastDoubleElements()
        ? FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
    object_elements = Add<HAllocate>(object_elements_size, HType::HeapObject(),
                                     pretenure_flag, instance_type, top_site);
    BuildEmitElements(boilerplate_object, elements, object_elements,
                      site_context);
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements);
  } else {
    // Empty or COW elements: point at the boilerplate's backing store.
    Handle<Object> elements_field =
        Handle<Object>(boilerplate_object->elements(), isolate());
    HInstruction* object_elements_cow = Add<HConstant>(elements_field);
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements_cow);
  }

  // Copy in-object properties.
  if (initial_map->NumberOfFields() != 0 ||
      initial_map->unused_property_fields() > 0) {
    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
                                pretenure_flag);
  }
  return object;
}
11830
11831
11832void HOptimizedGraphBuilder::BuildEmitObjectHeader(
11833 Handle<JSObject> boilerplate_object,
11834 HInstruction* object) {
11835 DCHECK(boilerplate_object->properties()->length() == 0);
11836
11837 Handle<Map> boilerplate_object_map(boilerplate_object->map());
11838 AddStoreMapConstant(object, boilerplate_object_map);
11839
11840 Handle<Object> properties_field =
11841 Handle<Object>(boilerplate_object->properties(), isolate());
11842 DCHECK(*properties_field == isolate()->heap()->empty_fixed_array());
11843 HInstruction* properties = Add<HConstant>(properties_field);
11844 HObjectAccess access = HObjectAccess::ForPropertiesPointer();
11845 Add<HStoreNamedField>(object, access, properties);
11846
11847 if (boilerplate_object->IsJSArray()) {
11848 Handle<JSArray> boilerplate_array =
11849 Handle<JSArray>::cast(boilerplate_object);
11850 Handle<Object> length_field =
11851 Handle<Object>(boilerplate_array->length(), isolate());
11852 HInstruction* length = Add<HConstant>(length_field);
11853
11854 DCHECK(boilerplate_array->length()->IsSmi());
11855 Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
11856 boilerplate_array->GetElementsKind()), length);
11857 }
11858}
11859
11860
// Copies the in-object data properties of the boilerplate into the freshly
// allocated |object|, recursing into nested object literals, and fills any
// remaining in-object slots with the one-pointer filler map.
void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
    Handle<JSObject> boilerplate_object,
    HInstruction* object,
    AllocationSiteUsageContext* site_context,
    PretenureFlag pretenure_flag) {
  Handle<Map> boilerplate_map(boilerplate_object->map());
  Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
  int limit = boilerplate_map->NumberOfOwnDescriptors();

  int copied_fields = 0;
  for (int i = 0; i < limit; i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    // Only plain data fields need per-object storage; other property kinds
    // live in the map/descriptor array.
    if (details.type() != DATA) continue;
    copied_fields++;
    FieldIndex field_index = FieldIndex::ForDescriptor(*boilerplate_map, i);


    int property_offset = field_index.offset();
    Handle<Name> name(descriptors->GetKey(i));

    // The access for the store depends on the type of the boilerplate.
    HObjectAccess access = boilerplate_object->IsJSArray() ?
        HObjectAccess::ForJSArrayOffset(property_offset) :
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);

    if (boilerplate_object->IsUnboxedDoubleField(field_index)) {
      // Unboxed double field: the raw double lives in the object itself, so
      // store it with a double representation and skip the boxed path.
      CHECK(!boilerplate_object->IsJSArray());
      double value = boilerplate_object->RawFastDoublePropertyAt(field_index);
      access = access.WithRepresentation(Representation::Double());
      Add<HStoreNamedField>(object, access, Add<HConstant>(value));
      continue;
    }
    Handle<Object> value(boilerplate_object->RawFastPropertyAt(field_index),
                         isolate());

    if (value->IsJSObject()) {
      // Nested literal: build it recursively in its own allocation-site
      // scope and store the resulting object.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreNamedField>(object, access, result);
    } else {
      Representation representation = details.representation();
      HInstruction* value_instruction;

      if (representation.IsDouble()) {
        // Allocate a HeapNumber box and store the value into it.
        HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
        HInstruction* double_box =
            Add<HAllocate>(heap_number_constant, HType::HeapObject(),
                pretenure_flag, MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(double_box,
            isolate()->factory()->mutable_heap_number_map());
        // Unwrap the mutable heap number from the boilerplate.
        HValue* double_value =
            Add<HConstant>(Handle<HeapNumber>::cast(value)->value());
        Add<HStoreNamedField>(
            double_box, HObjectAccess::ForHeapNumberValue(), double_value);
        value_instruction = double_box;
      } else if (representation.IsSmi()) {
        // An uninitialized field is stored as zero so the slot holds a smi.
        value_instruction = value->IsUninitialized()
            ? graph()->GetConstant0()
            : Add<HConstant>(value);
        // Ensure that value is stored as smi.
        access = access.WithRepresentation(representation);
      } else {
        value_instruction = Add<HConstant>(value);
      }

      Add<HStoreNamedField>(object, access, value_instruction);
    }
  }

  // Pre-fill the unused in-object property slots with the one-pointer filler
  // map so the slots are never left uninitialized.
  int inobject_properties = boilerplate_object->map()->GetInObjectProperties();
  HInstruction* value_instruction =
      Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
  for (int i = copied_fields; i < inobject_properties; i++) {
    DCHECK(boilerplate_object->IsJSObject());
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
    HObjectAccess access =
        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
    Add<HStoreNamedField>(object, access, value_instruction);
  }
}
11946
11947
11948void HOptimizedGraphBuilder::BuildEmitElements(
11949 Handle<JSObject> boilerplate_object,
11950 Handle<FixedArrayBase> elements,
11951 HValue* object_elements,
11952 AllocationSiteUsageContext* site_context) {
11953 ElementsKind kind = boilerplate_object->map()->elements_kind();
11954 int elements_length = elements->length();
11955 HValue* object_elements_length = Add<HConstant>(elements_length);
11956 BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
11957
11958 // Copy elements backing store content.
11959 if (elements->IsFixedDoubleArray()) {
11960 BuildEmitFixedDoubleArray(elements, kind, object_elements);
11961 } else if (elements->IsFixedArray()) {
11962 BuildEmitFixedArray(elements, kind, object_elements,
11963 site_context);
11964 } else {
11965 UNREACHABLE();
11966 }
11967}
11968
11969
11970void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
11971 Handle<FixedArrayBase> elements,
11972 ElementsKind kind,
11973 HValue* object_elements) {
11974 HInstruction* boilerplate_elements = Add<HConstant>(elements);
11975 int elements_length = elements->length();
11976 for (int i = 0; i < elements_length; i++) {
11977 HValue* key_constant = Add<HConstant>(i);
11978 HInstruction* value_instruction =
11979 Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
11980 kind, ALLOW_RETURN_HOLE);
11981 HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
11982 value_instruction, nullptr, kind);
11983 store->SetFlag(HValue::kAllowUndefinedAsNaN);
11984 }
11985}
11986
11987
11988void HOptimizedGraphBuilder::BuildEmitFixedArray(
11989 Handle<FixedArrayBase> elements,
11990 ElementsKind kind,
11991 HValue* object_elements,
11992 AllocationSiteUsageContext* site_context) {
11993 HInstruction* boilerplate_elements = Add<HConstant>(elements);
11994 int elements_length = elements->length();
11995 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
11996 for (int i = 0; i < elements_length; i++) {
11997 Handle<Object> value(fast_elements->get(i), isolate());
11998 HValue* key_constant = Add<HConstant>(i);
11999 if (value->IsJSObject()) {
12000 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
12001 Handle<AllocationSite> current_site = site_context->EnterNewScope();
12002 HInstruction* result =
12003 BuildFastLiteral(value_object, site_context);
12004 site_context->ExitScope(current_site, value_object);
12005 Add<HStoreKeyed>(object_elements, key_constant, result, nullptr, kind);
12006 } else {
12007 ElementsKind copy_kind =
12008 kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
12009 HInstruction* value_instruction =
12010 Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
12011 copy_kind, ALLOW_RETURN_HOLE);
12012 Add<HStoreKeyed>(object_elements, key_constant, value_instruction,
12013 nullptr, copy_kind);
12014 }
12015 }
12016}
12017
12018
12019void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
12020 DCHECK(!HasStackOverflow());
12021 DCHECK(current_block() != NULL);
12022 DCHECK(current_block()->HasPredecessor());
12023 HInstruction* instr = BuildThisFunction();
12024 return ast_context()->ReturnInstruction(instr, expr->id());
12025}
12026
12027
// Super property references are not supported by this compiler; bail out of
// optimized compilation.
void HOptimizedGraphBuilder::VisitSuperPropertyReference(
    SuperPropertyReference* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kSuperReference);
}
12035
12036
// Super call references are not supported by this compiler; bail out of
// optimized compilation.
void HOptimizedGraphBuilder::VisitSuperCallReference(SuperCallReference* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kSuperReference);
}
12043
12044
12045void HOptimizedGraphBuilder::VisitDeclarations(
12046 ZoneList<Declaration*>* declarations) {
12047 DCHECK(globals_.is_empty());
12048 AstVisitor::VisitDeclarations(declarations);
12049 if (!globals_.is_empty()) {
12050 Handle<FixedArray> array =
12051 isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
12052 for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
12053 int flags =
12054 DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
12055 DeclareGlobalsNativeFlag::encode(current_info()->is_native()) |
12056 DeclareGlobalsLanguageMode::encode(current_info()->language_mode());
12057 Add<HDeclareGlobals>(array, flags);
12058 globals_.Rewind(0);
12059 }
12060}
12061
12062
// Emits the declaration of a single (non-function) variable.  Globals are
// collected into globals_ as name/initial-value pairs (flushed later by
// VisitDeclarations); stack and context slots are hole-initialized here when
// the binding requires it.
void HOptimizedGraphBuilder::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  // LET/CONST/legacy-const bindings start out as the hole.
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      // Initial value is the hole (TDZ) when the binding needs
      // initialization, otherwise undefined.
      globals_.Add(variable->name(), zone());
      globals_.Add(variable->binding_needs_init()
                       ? isolate()->factory()->the_hole_value()
                       : isolate()->factory()->undefined_value(), zone());
      return;
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        HValue* value = graph()->GetConstantHole();
        environment()->Bind(variable, value);
      }
      break;
    case VariableLocation::CONTEXT:
      if (hole_init) {
        HValue* value = graph()->GetConstantHole();
        HValue* context = environment()->context();
        HStoreContextSlot* store = Add<HStoreContextSlot>(
            context, variable->index(), HStoreContextSlot::kNoCheck, value);
        // Observable context stores need a simulate after them.
        if (store->HasObservableSideEffects()) {
          Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
        }
      }
      break;
    case VariableLocation::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
12099
12100
// Emits the declaration of a function binding.  Global declarations collect
// the name and the function's SharedFunctionInfo into globals_; stack and
// context allocated bindings evaluate the function literal and bind/store
// its value immediately.
void HOptimizedGraphBuilder::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_.Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function = Compiler::GetSharedFunctionInfo(
          declaration->fun(), current_info()->script(), top_info());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_.Add(function, zone());
      return;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      BindIfLive(variable, value);
      break;
    }
    case VariableLocation::CONTEXT: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      HValue* context = environment()->context();
      HStoreContextSlot* store = Add<HStoreContextSlot>(
          context, variable->index(), HStoreContextSlot::kNoCheck, value);
      // Observable context stores need a simulate after them.
      if (store->HasObservableSideEffects()) {
        Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
      }
      break;
    }
    case VariableLocation::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
12138
12139
// Import declarations are never expected to reach this builder (module code
// presumably does not go through Crankshaft — confirm upstream).
void HOptimizedGraphBuilder::VisitImportDeclaration(
    ImportDeclaration* declaration) {
  UNREACHABLE();
}
12144
12145
// Export declarations are never expected to reach this builder (module code
// presumably does not go through Crankshaft — confirm upstream).
void HOptimizedGraphBuilder::VisitExportDeclaration(
    ExportDeclaration* declaration) {
  UNREACHABLE();
}
12150
12151
// A rewritable assignment expression simply delegates to the expression it
// wraps.
void HOptimizedGraphBuilder::VisitRewritableAssignmentExpression(
    RewritableAssignmentExpression* node) {
  CHECK_ALIVE(Visit(node->expression()));
}
12156
12157
12158// Generators for inline runtime functions.
12159// Support for types.
12160void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
12161 DCHECK(call->arguments()->length() == 1);
12162 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12163 HValue* value = Pop();
12164 HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
12165 return ast_context()->ReturnControl(result, call->id());
12166}
12167
12168
12169void HOptimizedGraphBuilder::GenerateIsJSReceiver(CallRuntime* call) {
12170 DCHECK(call->arguments()->length() == 1);
12171 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12172 HValue* value = Pop();
12173 HHasInstanceTypeAndBranch* result =
12174 New<HHasInstanceTypeAndBranch>(value,
12175 FIRST_JS_RECEIVER_TYPE,
12176 LAST_JS_RECEIVER_TYPE);
12177 return ast_context()->ReturnControl(result, call->id());
12178}
12179
12180
12181void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
12182 DCHECK(call->arguments()->length() == 1);
12183 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12184 HValue* value = Pop();
12185 HHasInstanceTypeAndBranch* result = New<HHasInstanceTypeAndBranch>(
12186 value, FIRST_FUNCTION_TYPE, LAST_FUNCTION_TYPE);
12187 return ast_context()->ReturnControl(result, call->id());
12188}
12189
12190
12191void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) {
12192 DCHECK(call->arguments()->length() == 1);
12193 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12194 HValue* value = Pop();
12195 HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value);
12196 return ast_context()->ReturnControl(result, call->id());
12197}
12198
12199
12200void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
12201 DCHECK(call->arguments()->length() == 1);
12202 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12203 HValue* value = Pop();
12204 HHasCachedArrayIndexAndBranch* result =
12205 New<HHasCachedArrayIndexAndBranch>(value);
12206 return ast_context()->ReturnControl(result, call->id());
12207}
12208
12209
12210void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
12211 DCHECK(call->arguments()->length() == 1);
12212 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12213 HValue* value = Pop();
12214 HHasInstanceTypeAndBranch* result =
12215 New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
12216 return ast_context()->ReturnControl(result, call->id());
12217}
12218
12219
12220void HOptimizedGraphBuilder::GenerateIsTypedArray(CallRuntime* call) {
12221 DCHECK(call->arguments()->length() == 1);
12222 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12223 HValue* value = Pop();
12224 HHasInstanceTypeAndBranch* result =
12225 New<HHasInstanceTypeAndBranch>(value, JS_TYPED_ARRAY_TYPE);
12226 return ast_context()->ReturnControl(result, call->id());
12227}
12228
12229
12230void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
12231 DCHECK(call->arguments()->length() == 1);
12232 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12233 HValue* value = Pop();
12234 HHasInstanceTypeAndBranch* result =
12235 New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
12236 return ast_context()->ReturnControl(result, call->id());
12237}
12238
12239
// %_ToInteger(input): values statically known to be smis are returned
// unchanged; otherwise a runtime smi check picks between the input itself
// and the Runtime::kToInteger fallback.
void HOptimizedGraphBuilder::GenerateToInteger(CallRuntime* call) {
  DCHECK_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* input = Pop();
  if (input->type().IsSmi()) {
    return ast_context()->ReturnValue(input);
  } else {
    IfBuilder if_inputissmi(this);
    if_inputissmi.If<HIsSmiAndBranch>(input);
    if_inputissmi.Then();
    {
      // Return the input value.
      Push(input);
      Add<HSimulate>(call->id(), FIXED_SIMULATE);
    }
    if_inputissmi.Else();
    {
      // Non-smi: defer to the runtime conversion.
      Add<HPushArguments>(input);
      Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kToInteger), 1));
      Add<HSimulate>(call->id(), FIXED_SIMULATE);
    }
    if_inputissmi.End();
    // Both arms pushed exactly one value; pop it as the merged result.
    return ast_context()->ReturnValue(Pop());
  }
}
12265
12266
12267void HOptimizedGraphBuilder::GenerateToObject(CallRuntime* call) {
12268 DCHECK_EQ(1, call->arguments()->length());
12269 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12270 HValue* value = Pop();
12271 HValue* result = BuildToObject(value);
12272 return ast_context()->ReturnValue(result);
12273}
12274
12275
12276void HOptimizedGraphBuilder::GenerateToString(CallRuntime* call) {
12277 DCHECK_EQ(1, call->arguments()->length());
12278 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12279 Callable callable = CodeFactory::ToString(isolate());
12280 HValue* input = Pop();
12281 if (input->type().IsString()) {
12282 return ast_context()->ReturnValue(input);
12283 } else {
12284 HValue* stub = Add<HConstant>(callable.code());
12285 HValue* values[] = {context(), input};
12286 HInstruction* result =
12287 New<HCallWithDescriptor>(stub, 0, callable.descriptor(),
12288 Vector<HValue*>(values, arraysize(values)));
12289 return ast_context()->ReturnInstruction(result, call->id());
12290 }
12291}
12292
12293
12294void HOptimizedGraphBuilder::GenerateToLength(CallRuntime* call) {
12295 DCHECK_EQ(1, call->arguments()->length());
12296 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12297 Callable callable = CodeFactory::ToLength(isolate());
12298 HValue* input = Pop();
12299 HValue* stub = Add<HConstant>(callable.code());
12300 HValue* values[] = {context(), input};
12301 HInstruction* result =
12302 New<HCallWithDescriptor>(stub, 0, callable.descriptor(),
12303 Vector<HValue*>(values, arraysize(values)));
12304 return ast_context()->ReturnInstruction(result, call->id());
12305}
12306
12307
12308void HOptimizedGraphBuilder::GenerateToNumber(CallRuntime* call) {
12309 DCHECK_EQ(1, call->arguments()->length());
12310 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12311 Callable callable = CodeFactory::ToNumber(isolate());
12312 HValue* input = Pop();
12313 if (input->type().IsTaggedNumber()) {
12314 return ast_context()->ReturnValue(input);
12315 } else {
12316 HValue* stub = Add<HConstant>(callable.code());
12317 HValue* values[] = {context(), input};
12318 HInstruction* result =
12319 New<HCallWithDescriptor>(stub, 0, callable.descriptor(),
12320 Vector<HValue*>(values, arraysize(values)));
12321 return ast_context()->ReturnInstruction(result, call->id());
12322 }
12323}
12324
12325
// %_IsJSProxy(value): true iff the value is a heap object whose instance
// type is JS_PROXY_TYPE.
void HOptimizedGraphBuilder::GenerateIsJSProxy(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* value = Pop();
  HIfContinuation continuation;
  IfBuilder if_proxy(this);

  // The smi check is passed as a dependency into the map load below, so the
  // load cannot be hoisted above it.
  HValue* smicheck = if_proxy.IfNot<HIsSmiAndBranch>(value);
  if_proxy.And();
  HValue* map = Add<HLoadNamedField>(value, smicheck, HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
  if_proxy.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_PROXY_TYPE), Token::EQ);

  if_proxy.CaptureContinuation(&continuation);
  return ast_context()->ReturnContinuation(&continuation, call->id());
}
12344
12345
// %_HasFastPackedElements(object): true iff the object is a heap object
// whose elements kind is one of the packed fast kinds (SMI, tagged or
// double).  Smis flow to the false branch of the outer IfBuilder.
void HOptimizedGraphBuilder::GenerateHasFastPackedElements(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* object = Pop();
  HIfContinuation continuation(graph()->CreateBasicBlock(),
                               graph()->CreateBasicBlock());
  IfBuilder if_not_smi(this);
  if_not_smi.IfNot<HIsSmiAndBranch>(object);
  if_not_smi.Then();
  {
    NoObservableSideEffectsScope no_effects(this);

    // Compare the elements kind against each packed fast kind in turn.
    IfBuilder if_fast_packed(this);
    HValue* elements_kind = BuildGetElementsKind(object);
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_SMI_ELEMENTS), Token::EQ);
    if_fast_packed.Or();
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_ELEMENTS), Token::EQ);
    if_fast_packed.Or();
    if_fast_packed.If<HCompareNumericAndBranch>(
        elements_kind, Add<HConstant>(FAST_DOUBLE_ELEMENTS), Token::EQ);
    if_fast_packed.JoinContinuation(&continuation);
  }
  if_not_smi.JoinContinuation(&continuation);
  return ast_context()->ReturnContinuation(&continuation, call->id());
}
12373
12374
12375// Support for arguments.length and arguments[?].
12376void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
12377 DCHECK(call->arguments()->length() == 0);
12378 HInstruction* result = NULL;
12379 if (function_state()->outer() == NULL) {
12380 HInstruction* elements = Add<HArgumentsElements>(false);
12381 result = New<HArgumentsLength>(elements);
12382 } else {
12383 // Number of arguments without receiver.
12384 int argument_count = environment()->
12385 arguments_environment()->parameter_count() - 1;
12386 result = New<HConstant>(argument_count);
12387 }
12388 return ast_context()->ReturnInstruction(result, call->id());
12389}
12390
12391
12392void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
12393 DCHECK(call->arguments()->length() == 1);
12394 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12395 HValue* index = Pop();
12396 HInstruction* result = NULL;
12397 if (function_state()->outer() == NULL) {
12398 HInstruction* elements = Add<HArgumentsElements>(false);
12399 HInstruction* length = Add<HArgumentsLength>(elements);
12400 HInstruction* checked_index = Add<HBoundsCheck>(index, length);
12401 result = New<HAccessArgumentsAt>(elements, length, checked_index);
12402 } else {
12403 EnsureArgumentsArePushedForAccess();
12404
12405 // Number of arguments without receiver.
12406 HInstruction* elements = function_state()->arguments_elements();
12407 int argument_count = environment()->
12408 arguments_environment()->parameter_count() - 1;
12409 HInstruction* length = Add<HConstant>(argument_count);
12410 HInstruction* checked_key = Add<HBoundsCheck>(index, length);
12411 result = New<HAccessArgumentsAt>(elements, length, checked_key);
12412 }
12413 return ast_context()->ReturnInstruction(result, call->id());
12414}
12415
12416
// %_ValueOf(object): if the object is a JSValue wrapper, returns the wrapped
// primitive; otherwise returns the object itself. Both arms push their
// result and simulate so the join can pop a single merged value.
void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* object = Pop();

  IfBuilder if_objectisvalue(this);
  HValue* objectisvalue = if_objectisvalue.If<HHasInstanceTypeAndBranch>(
      object, JS_VALUE_TYPE);
  if_objectisvalue.Then();
  {
    // Return the actual value.
    Push(Add<HLoadNamedField>(
        object, objectisvalue,
        HObjectAccess::ForObservableJSObjectOffset(
            JSValue::kValueOffset)));
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.Else();
  {
    // If the object is not a value return the object.
    Push(object);
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.End();
  return ast_context()->ReturnValue(Pop());
}
12443
12444
12445void HOptimizedGraphBuilder::GenerateJSValueGetValue(CallRuntime* call) {
12446 DCHECK(call->arguments()->length() == 1);
12447 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12448 HValue* value = Pop();
12449 HInstruction* result = Add<HLoadNamedField>(
12450 value, nullptr,
12451 HObjectAccess::ForObservableJSObjectOffset(JSValue::kValueOffset));
12452 return ast_context()->ReturnInstruction(result, call->id());
12453}
12454
12455
12456void HOptimizedGraphBuilder::GenerateIsDate(CallRuntime* call) {
12457 DCHECK_EQ(1, call->arguments()->length());
12458 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12459 HValue* value = Pop();
12460 HHasInstanceTypeAndBranch* result =
12461 New<HHasInstanceTypeAndBranch>(value, JS_DATE_TYPE);
12462 return ast_context()->ReturnControl(result, call->id());
12463}
12464
12465
12466void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
12467 CallRuntime* call) {
12468 DCHECK(call->arguments()->length() == 3);
12469 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12470 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12471 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
12472 HValue* string = Pop();
12473 HValue* value = Pop();
12474 HValue* index = Pop();
12475 Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
12476 index, value);
12477 Add<HSimulate>(call->id(), FIXED_SIMULATE);
12478 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12479}
12480
12481
12482void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
12483 CallRuntime* call) {
12484 DCHECK(call->arguments()->length() == 3);
12485 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12486 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12487 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
12488 HValue* string = Pop();
12489 HValue* value = Pop();
12490 HValue* index = Pop();
12491 Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
12492 index, value);
12493 Add<HSimulate>(call->id(), FIXED_SIMULATE);
12494 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12495}
12496
12497
// %_SetValueOf(object, value): if |object| is a JSValue wrapper, stores
// |value| into its value slot. Evaluates to |value| in either case. The
// Push/Drop pairs keep |value| live across the simulates when the result
// is actually used (non-effect context).
void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
  DCHECK(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* value = Pop();
  HValue* object = Pop();

  // Check if object is a JSValue.
  IfBuilder if_objectisvalue(this);
  if_objectisvalue.If<HHasInstanceTypeAndBranch>(object, JS_VALUE_TYPE);
  if_objectisvalue.Then();
  {
    // Create in-object property store to kValueOffset.
    Add<HStoreNamedField>(object,
        HObjectAccess::ForObservableJSObjectOffset(JSValue::kValueOffset),
        value);
    if (!ast_context()->IsEffect()) {
      Push(value);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.Else();
  {
    // Nothing to do in this case.
    if (!ast_context()->IsEffect()) {
      Push(value);
    }
    Add<HSimulate>(call->id(), FIXED_SIMULATE);
  }
  if_objectisvalue.End();
  if (!ast_context()->IsEffect()) {
    Drop(1);
  }
  return ast_context()->ReturnValue(value);
}
12533
12534
12535// Fast support for charCodeAt(n).
12536void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
12537 DCHECK(call->arguments()->length() == 2);
12538 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12539 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12540 HValue* index = Pop();
12541 HValue* string = Pop();
12542 HInstruction* result = BuildStringCharCodeAt(string, index);
12543 return ast_context()->ReturnInstruction(result, call->id());
12544}
12545
12546
12547// Fast support for string.charAt(n) and string[n].
12548void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
12549 DCHECK(call->arguments()->length() == 1);
12550 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12551 HValue* char_code = Pop();
12552 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
12553 return ast_context()->ReturnInstruction(result, call->id());
12554}
12555
12556
12557// Fast support for string.charAt(n) and string[n].
12558void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
12559 DCHECK(call->arguments()->length() == 2);
12560 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12561 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12562 HValue* index = Pop();
12563 HValue* string = Pop();
12564 HInstruction* char_code = BuildStringCharCodeAt(string, index);
12565 AddInstruction(char_code);
12566 HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
12567 return ast_context()->ReturnInstruction(result, call->id());
12568}
12569
12570
12571// Fast support for object equality testing.
12572void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
12573 DCHECK(call->arguments()->length() == 2);
12574 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12575 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12576 HValue* right = Pop();
12577 HValue* left = Pop();
12578 HCompareObjectEqAndBranch* result =
12579 New<HCompareObjectEqAndBranch>(left, right);
12580 return ast_context()->ReturnControl(result, call->id());
12581}
12582
12583
12584// Fast support for SubString.
12585void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
12586 DCHECK_EQ(3, call->arguments()->length());
12587 CHECK_ALIVE(VisitExpressions(call->arguments()));
12588 PushArgumentsFromEnvironment(call->arguments()->length());
12589 HCallStub* result = New<HCallStub>(CodeStub::SubString, 3);
12590 return ast_context()->ReturnInstruction(result, call->id());
12591}
12592
12593
12594// Support for direct calls from JavaScript to native RegExp code.
12595void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
12596 DCHECK_EQ(4, call->arguments()->length());
12597 CHECK_ALIVE(VisitExpressions(call->arguments()));
12598 PushArgumentsFromEnvironment(call->arguments()->length());
12599 HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4);
12600 return ast_context()->ReturnInstruction(result, call->id());
12601}
12602
12603
12604void HOptimizedGraphBuilder::GenerateRegExpFlags(CallRuntime* call) {
12605 DCHECK_EQ(1, call->arguments()->length());
12606 CHECK_ALIVE(VisitExpressions(call->arguments()));
12607 HValue* regexp = Pop();
12608 HInstruction* result =
12609 New<HLoadNamedField>(regexp, nullptr, HObjectAccess::ForJSRegExpFlags());
12610 return ast_context()->ReturnInstruction(result, call->id());
12611}
12612
12613
12614void HOptimizedGraphBuilder::GenerateRegExpSource(CallRuntime* call) {
12615 DCHECK_EQ(1, call->arguments()->length());
12616 CHECK_ALIVE(VisitExpressions(call->arguments()));
12617 HValue* regexp = Pop();
12618 HInstruction* result =
12619 New<HLoadNamedField>(regexp, nullptr, HObjectAccess::ForJSRegExpSource());
12620 return ast_context()->ReturnInstruction(result, call->id());
12621}
12622
12623
12624void HOptimizedGraphBuilder::GenerateDoubleLo(CallRuntime* call) {
12625 DCHECK_EQ(1, call->arguments()->length());
12626 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12627 HValue* value = Pop();
12628 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::LOW);
12629 return ast_context()->ReturnInstruction(result, call->id());
12630}
12631
12632
12633void HOptimizedGraphBuilder::GenerateDoubleHi(CallRuntime* call) {
12634 DCHECK_EQ(1, call->arguments()->length());
12635 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12636 HValue* value = Pop();
12637 HInstruction* result = NewUncasted<HDoubleBits>(value, HDoubleBits::HIGH);
12638 return ast_context()->ReturnInstruction(result, call->id());
12639}
12640
12641
12642void HOptimizedGraphBuilder::GenerateConstructDouble(CallRuntime* call) {
12643 DCHECK_EQ(2, call->arguments()->length());
12644 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12645 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12646 HValue* lo = Pop();
12647 HValue* hi = Pop();
12648 HInstruction* result = NewUncasted<HConstructDouble>(hi, lo);
12649 return ast_context()->ReturnInstruction(result, call->id());
12650}
12651
12652
12653// Construct a RegExp exec result with two in-object properties.
12654void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
12655 DCHECK_EQ(3, call->arguments()->length());
12656 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12657 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12658 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
12659 HValue* input = Pop();
12660 HValue* index = Pop();
12661 HValue* length = Pop();
12662 HValue* result = BuildRegExpConstructResult(length, index, input);
12663 return ast_context()->ReturnValue(result);
12664}
12665
12666
12667// Fast support for number to string.
12668void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
12669 DCHECK_EQ(1, call->arguments()->length());
12670 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12671 HValue* number = Pop();
12672 HValue* result = BuildNumberToString(number, Type::Any(zone()));
12673 return ast_context()->ReturnValue(result);
12674}
12675
12676
// Fast support for calls: invokes the Call builtin trampoline.
// arguments()->at(0) is the call target; the remaining expressions (the
// receiver followed by the actual arguments) are pushed for the trampoline,
// so the actual argument count passed is length - 2.
void HOptimizedGraphBuilder::GenerateCall(CallRuntime* call) {
  DCHECK_LE(2, call->arguments()->length());
  CHECK_ALIVE(VisitExpressions(call->arguments()));
  CallTrampolineDescriptor descriptor(isolate());
  // Move everything except the bottommost value (the target) from the
  // environment to the argument stack; the target is then popped below.
  PushArgumentsFromEnvironment(call->arguments()->length() - 1);
  HValue* trampoline = Add<HConstant>(isolate()->builtins()->Call());
  HValue* target = Pop();
  HValue* values[] = {context(), target,
                      Add<HConstant>(call->arguments()->length() - 2)};
  HInstruction* result = New<HCallWithDescriptor>(
      trampoline, call->arguments()->length() - 1, descriptor,
      Vector<HValue*>(values, arraysize(values)));
  return ast_context()->ReturnInstruction(result, call->id());
}
12692
12693
12694// Fast call to math functions.
12695void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
12696 DCHECK_EQ(2, call->arguments()->length());
12697 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12698 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12699 HValue* right = Pop();
12700 HValue* left = Pop();
12701 HInstruction* result = NewUncasted<HPower>(left, right);
12702 return ast_context()->ReturnInstruction(result, call->id());
12703}
12704
12705
12706void HOptimizedGraphBuilder::GenerateMathClz32(CallRuntime* call) {
12707 DCHECK(call->arguments()->length() == 1);
12708 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12709 HValue* value = Pop();
12710 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathClz32);
12711 return ast_context()->ReturnInstruction(result, call->id());
12712}
12713
12714
12715void HOptimizedGraphBuilder::GenerateMathFloor(CallRuntime* call) {
12716 DCHECK(call->arguments()->length() == 1);
12717 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12718 HValue* value = Pop();
12719 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathFloor);
12720 return ast_context()->ReturnInstruction(result, call->id());
12721}
12722
12723
12724void HOptimizedGraphBuilder::GenerateMathLogRT(CallRuntime* call) {
12725 DCHECK(call->arguments()->length() == 1);
12726 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12727 HValue* value = Pop();
12728 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathLog);
12729 return ast_context()->ReturnInstruction(result, call->id());
12730}
12731
12732
12733void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
12734 DCHECK(call->arguments()->length() == 1);
12735 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12736 HValue* value = Pop();
12737 HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt);
12738 return ast_context()->ReturnInstruction(result, call->id());
12739}
12740
12741
12742void HOptimizedGraphBuilder::GenerateFixedArrayGet(CallRuntime* call) {
12743 DCHECK(call->arguments()->length() == 2);
12744 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12745 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12746 HValue* index = Pop();
12747 HValue* object = Pop();
12748 HInstruction* result = New<HLoadKeyed>(
12749 object, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);
12750 return ast_context()->ReturnInstruction(result, call->id());
12751}
12752
12753
12754void HOptimizedGraphBuilder::GenerateFixedArraySet(CallRuntime* call) {
12755 DCHECK(call->arguments()->length() == 3);
12756 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12757 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12758 CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
12759 HValue* value = Pop();
12760 HValue* index = Pop();
12761 HValue* object = Pop();
12762 NoObservableSideEffectsScope no_effects(this);
12763 Add<HStoreKeyed>(object, index, value, nullptr, FAST_HOLEY_ELEMENTS);
12764 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12765}
12766
12767
12768void HOptimizedGraphBuilder::GenerateTheHole(CallRuntime* call) {
12769 DCHECK(call->arguments()->length() == 0);
12770 return ast_context()->ReturnValue(graph()->GetConstantHole());
12771}
12772
12773
12774void HOptimizedGraphBuilder::GenerateCreateIterResultObject(CallRuntime* call) {
12775 DCHECK_EQ(2, call->arguments()->length());
12776 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12777 CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12778 HValue* done = Pop();
12779 HValue* value = Pop();
12780 HValue* result = BuildCreateIterResultObject(value, done);
12781 return ast_context()->ReturnValue(result);
12782}
12783
12784
12785void HOptimizedGraphBuilder::GenerateJSCollectionGetTable(CallRuntime* call) {
12786 DCHECK(call->arguments()->length() == 1);
12787 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12788 HValue* receiver = Pop();
12789 HInstruction* result = New<HLoadNamedField>(
12790 receiver, nullptr, HObjectAccess::ForJSCollectionTable());
12791 return ast_context()->ReturnInstruction(result, call->id());
12792}
12793
12794
12795void HOptimizedGraphBuilder::GenerateStringGetRawHashField(CallRuntime* call) {
12796 DCHECK(call->arguments()->length() == 1);
12797 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12798 HValue* object = Pop();
12799 HInstruction* result = New<HLoadNamedField>(
12800 object, nullptr, HObjectAccess::ForStringHashField());
12801 return ast_context()->ReturnInstruction(result, call->id());
12802}
12803
12804
// Allocates and initializes an empty OrderedHashTable (OrderedHashSet or
// OrderedHashMap, selected by CollectionType) at minimal capacity: the map
// and FixedArray length are set, the header fields zeroed, buckets filled
// with kNotFound and the data table with undefined. All sizes are
// compile-time constants derived from the CollectionType's layout.
template <typename CollectionType>
HValue* HOptimizedGraphBuilder::BuildAllocateOrderedHashTable() {
  static const int kCapacity = CollectionType::kMinCapacity;
  static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
  static const int kFixedArrayLength = CollectionType::kHashTableStartIndex +
                                       kBucketCount +
                                       (kCapacity * CollectionType::kEntrySize);
  static const int kSizeInBytes =
      FixedArray::kHeaderSize + (kFixedArrayLength * kPointerSize);

  // Allocate the table and add the proper map.
  HValue* table =
      Add<HAllocate>(Add<HConstant>(kSizeInBytes), HType::HeapObject(),
                     NOT_TENURED, FIXED_ARRAY_TYPE);
  AddStoreMapConstant(table, isolate()->factory()->ordered_hash_table_map());

  // Initialize the FixedArray...
  HValue* length = Add<HConstant>(kFixedArrayLength);
  Add<HStoreNamedField>(table, HObjectAccess::ForFixedArrayLength(), length);

  // ...and the OrderedHashTable fields.
  Add<HStoreNamedField>(
      table,
      HObjectAccess::ForOrderedHashTableNumberOfBuckets<CollectionType>(),
      Add<HConstant>(kBucketCount));
  Add<HStoreNamedField>(
      table,
      HObjectAccess::ForOrderedHashTableNumberOfElements<CollectionType>(),
      graph()->GetConstant0());
  Add<HStoreNamedField>(
      table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
                 CollectionType>(),
      graph()->GetConstant0());

  // Fill the buckets with kNotFound.
  HValue* not_found = Add<HConstant>(CollectionType::kNotFound);
  for (int i = 0; i < kBucketCount; ++i) {
    Add<HStoreNamedField>(
        table, HObjectAccess::ForOrderedHashTableBucket<CollectionType>(i),
        not_found);
  }

  // Fill the data table with undefined.
  HValue* undefined = graph()->GetConstantUndefined();
  for (int i = 0; i < (kCapacity * CollectionType::kEntrySize); ++i) {
    Add<HStoreNamedField>(table,
                          HObjectAccess::ForOrderedHashTableDataTableIndex<
                              CollectionType, kBucketCount>(i),
                          undefined);
  }

  return table;
}
12858
12859
12860void HOptimizedGraphBuilder::GenerateSetInitialize(CallRuntime* call) {
12861 DCHECK(call->arguments()->length() == 1);
12862 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12863 HValue* receiver = Pop();
12864
12865 NoObservableSideEffectsScope no_effects(this);
12866 HValue* table = BuildAllocateOrderedHashTable<OrderedHashSet>();
12867 Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
12868 return ast_context()->ReturnValue(receiver);
12869}
12870
12871
12872void HOptimizedGraphBuilder::GenerateMapInitialize(CallRuntime* call) {
12873 DCHECK(call->arguments()->length() == 1);
12874 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12875 HValue* receiver = Pop();
12876
12877 NoObservableSideEffectsScope no_effects(this);
12878 HValue* table = BuildAllocateOrderedHashTable<OrderedHashMap>();
12879 Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
12880 return ast_context()->ReturnValue(receiver);
12881}
12882
12883
// Clears a JSSet/JSMap: allocates a fresh empty table, points the old
// table's next-table field at it and writes the cleared-table sentinel into
// the old table's deleted-elements slot (presumably so outstanding
// iterators can detect the clear -- TODO confirm against OrderedHashTable),
// then installs the new table on the receiver.
template <typename CollectionType>
void HOptimizedGraphBuilder::BuildOrderedHashTableClear(HValue* receiver) {
  HValue* old_table = Add<HLoadNamedField>(
      receiver, nullptr, HObjectAccess::ForJSCollectionTable());
  HValue* new_table = BuildAllocateOrderedHashTable<CollectionType>();
  Add<HStoreNamedField>(
      old_table, HObjectAccess::ForOrderedHashTableNextTable<CollectionType>(),
      new_table);
  Add<HStoreNamedField>(
      old_table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
                     CollectionType>(),
      Add<HConstant>(CollectionType::kClearedTableSentinel));
  Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(),
                        new_table);
}
12899
12900
12901void HOptimizedGraphBuilder::GenerateSetClear(CallRuntime* call) {
12902 DCHECK(call->arguments()->length() == 1);
12903 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12904 HValue* receiver = Pop();
12905
12906 NoObservableSideEffectsScope no_effects(this);
12907 BuildOrderedHashTableClear<OrderedHashSet>(receiver);
12908 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12909}
12910
12911
12912void HOptimizedGraphBuilder::GenerateMapClear(CallRuntime* call) {
12913 DCHECK(call->arguments()->length() == 1);
12914 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12915 HValue* receiver = Pop();
12916
12917 NoObservableSideEffectsScope no_effects(this);
12918 BuildOrderedHashTableClear<OrderedHashMap>(receiver);
12919 return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12920}
12921
12922
12923void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
12924 DCHECK(call->arguments()->length() == 1);
12925 CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12926 HValue* value = Pop();
12927 HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
12928 return ast_context()->ReturnInstruction(result, call->id());
12929}
12930
12931
// %_FastOneByteArrayJoin is deliberately not inlined in Crankshaft; the
// whole function bails out so execution stays in full-codegen.
void HOptimizedGraphBuilder::GenerateFastOneByteArrayJoin(CallRuntime* call) {
  // Simply returning undefined here would be semantically correct and even
  // avoid the bailout. Nevertheless, some ancient benchmarks like SunSpider's
  // string-fasta would tank, because fullcode contains an optimized version.
  // Obviously the fullcode => Crankshaft => bailout => fullcode dance is
  // faster... *sigh*
  return Bailout(kInlinedRuntimeFunctionFastOneByteArrayJoin);
}
12940
12941
// Emits a debug-break instruction into the optimized code and evaluates to
// the constant 0. The CallRuntime argument is intentionally unused.
void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
    CallRuntime* call) {
  Add<HDebugBreak>();
  return ast_context()->ReturnValue(graph()->GetConstant0());
}
12947
12948
12949void HOptimizedGraphBuilder::GenerateDebugIsActive(CallRuntime* call) {
12950 DCHECK(call->arguments()->length() == 0);
12951 HValue* ref =
12952 Add<HConstant>(ExternalReference::debug_is_active_address(isolate()));
12953 HValue* value =
12954 Add<HLoadNamedField>(ref, nullptr, HObjectAccess::ForExternalUInteger8());
12955 return ast_context()->ReturnValue(value);
12956}
12957
12958
12959#undef CHECK_BAILOUT
12960#undef CHECK_ALIVE
12961
12962
// Creates a JS_FUNCTION environment for |closure| sized for the declaration
// scope of |scope|: its parameters plus the receiver, and its stack slots.
// specials_count_ is 1 (the context slot -- see other Initialize callers).
HEnvironment::HEnvironment(HEnvironment* outer,
                           Scope* scope,
                           Handle<JSFunction> closure,
                           Zone* zone)
    : closure_(closure),
      values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(1),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Scope* declaration_scope = scope->DeclarationScope();
  // +1 accounts for the receiver.
  Initialize(declaration_scope->num_parameters() + 1,
             declaration_scope->num_stack_slots(), 0);
}
12983
12984
// Creates a STUB-frame environment with the given parameter count, no
// locals, no outer environment and no closure.
HEnvironment::HEnvironment(Zone* zone, int parameter_count)
    : values_(0, zone),
      frame_type_(STUB),
      parameter_count_(parameter_count),
      specials_count_(1),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Initialize(parameter_count, 0, 0);
}
12999
13000
// Copy constructor: clones |other| into |zone| via Initialize(other), which
// copies all values, counts and (deeply) the outer chain. The ast id is
// taken over; push/pop history is copied as-is.
HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
    : values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(0),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(other->ast_id()),
      zone_(zone) {
  Initialize(other);
}
13015
13016
// Creates an artificial environment of the given |frame_type| (used for
// inlining-related stub frames) holding |arguments| parameter slots and
// nothing else; the body intentionally does not call Initialize, so values
// are pushed by the caller (see CreateStubEnvironment).
HEnvironment::HEnvironment(HEnvironment* outer,
                           Handle<JSFunction> closure,
                           FrameType frame_type,
                           int arguments,
                           Zone* zone)
    : closure_(closure),
      values_(arguments, zone),
      frame_type_(frame_type),
      parameter_count_(arguments),
      specials_count_(0),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
}
13035
13036
13037void HEnvironment::Initialize(int parameter_count,
13038 int local_count,
13039 int stack_height) {
13040 parameter_count_ = parameter_count;
13041 local_count_ = local_count;
13042
13043 // Avoid reallocating the temporaries' backing store on the first Push.
13044 int total = parameter_count + specials_count_ + local_count + stack_height;
13045 values_.Initialize(total + 4, zone());
13046 for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
13047}
13048
13049
// Copies all state from |other| into this environment. The outer chain is
// deep-copied so the two environments can subsequently diverge.
void HEnvironment::Initialize(const HEnvironment* other) {
  closure_ = other->closure();
  values_.AddAll(other->values_, zone());
  assigned_variables_.Union(other->assigned_variables_, zone());
  frame_type_ = other->frame_type_;
  parameter_count_ = other->parameter_count_;
  local_count_ = other->local_count_;
  if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
  entry_ = other->entry_;
  pop_count_ = other->pop_count_;
  push_count_ = other->push_count_;
  specials_count_ = other->specials_count_;
  ast_id_ = other->ast_id_;
}
13064
13065
// Merges |other| -- the environment flowing in along a new predecessor edge
// of |block| -- into this environment. Where the two environments disagree
// on a slot, a phi is created (or, if one already exists for this block,
// extended with the new input).
void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
  DCHECK(!block->IsLoopHeader());
  DCHECK(values_.length() == other->values_.length());

  int length = values_.length();
  for (int i = 0; i < length; ++i) {
    HValue* value = values_[i];
    if (value != NULL && value->IsPhi() && value->block() == block) {
      // There is already a phi for the i'th value.
      HPhi* phi = HPhi::cast(value);
      // Assert index is correct and that we haven't missed an incoming edge.
      DCHECK(phi->merged_index() == i || !phi->HasMergedIndex());
      DCHECK(phi->OperandCount() == block->predecessors()->length());
      phi->AddInput(other->values_[i]);
    } else if (values_[i] != other->values_[i]) {
      // There is a fresh value on the incoming edge, a phi is needed.
      DCHECK(values_[i] != NULL && other->values_[i] != NULL);
      HPhi* phi = block->AddNewPhi(i);
      HValue* old_value = values_[i];
      // All edges seen so far carried the old value.
      for (int j = 0; j < block->predecessors()->length(); j++) {
        phi->AddInput(old_value);
      }
      phi->AddInput(other->values_[i]);
      this->values_[i] = phi;
    }
  }
}
13093
13094
13095void HEnvironment::Bind(int index, HValue* value) {
13096 DCHECK(value != NULL);
13097 assigned_variables_.Add(index, zone());
13098 values_[index] = value;
13099}
13100
13101
13102bool HEnvironment::HasExpressionAt(int index) const {
13103 return index >= parameter_count_ + specials_count_ + local_count_;
13104}
13105
13106
13107bool HEnvironment::ExpressionStackIsEmpty() const {
13108 DCHECK(length() >= first_expression_index());
13109 return length() == first_expression_index();
13110}
13111
13112
// Overwrites the expression-stack slot |index_from_top| positions below the
// top with |value|, adjusting the push/pop history so the write is visible
// in this environment's simulate.
void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
  int count = index_from_top + 1;
  int index = values_.length() - count;
  DCHECK(HasExpressionAt(index));
  // The push count must include at least the element in question or else
  // the new value will not be included in this environment's history.
  if (push_count_ < count) {
    // This is the same effect as popping then re-pushing 'count' elements.
    pop_count_ += (count - push_count_);
    push_count_ = count;
  }
  values_[index] = value;
}
13126
13127
// Removes and returns the expression-stack value |index_from_top| positions
// below the top, updating the push/pop history accordingly.
HValue* HEnvironment::RemoveExpressionStackAt(int index_from_top) {
  int count = index_from_top + 1;
  int index = values_.length() - count;
  DCHECK(HasExpressionAt(index));
  // Simulate popping 'count' elements and then
  // pushing 'count - 1' elements back.
  pop_count_ += Max(count - push_count_, 0);
  push_count_ = Max(push_count_ - count, 0) + (count - 1);
  return values_.Remove(index);
}
13138
13139
13140void HEnvironment::Drop(int count) {
13141 for (int i = 0; i < count; ++i) {
13142 Pop();
13143 }
13144}
13145
13146
13147void HEnvironment::Print() const {
13148 OFStream os(stdout);
13149 os << *this << "\n";
13150}
13151
13152
13153HEnvironment* HEnvironment::Copy() const {
13154 return new(zone()) HEnvironment(this, zone());
13155}
13156
13157
13158HEnvironment* HEnvironment::CopyWithoutHistory() const {
13159 HEnvironment* result = Copy();
13160 result->ClearHistory();
13161 return result;
13162}
13163
13164
13165HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
13166 HEnvironment* new_env = Copy();
13167 for (int i = 0; i < values_.length(); ++i) {
13168 HPhi* phi = loop_header->AddNewPhi(i);
13169 phi->AddInput(values_[i]);
13170 new_env->values_[i] = phi;
13171 }
13172 new_env->ClearHistory();
13173 return new_env;
13174}
13175
13176
// Builds an artificial environment of |frame_type| on top of |outer|,
// holding the receiver plus |arguments| values copied (receiver first) from
// this environment's expression stack. History is cleared on the result.
HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
                                                  Handle<JSFunction> target,
                                                  FrameType frame_type,
                                                  int arguments) const {
  HEnvironment* new_env =
      new(zone()) HEnvironment(outer, target, frame_type,
                               arguments + 1, zone());
  for (int i = 0; i <= arguments; ++i) {  // Include receiver.
    new_env->Push(ExpressionStackAt(arguments - i));
  }
  new_env->ClearHistory();
  return new_env;
}
13190
13191
// Creates the environment seen at the entry of an inlined function: the
// caller environment stripped of the call's arguments, with any required
// artificial frames stacked on top (construct/getter/setter stubs, and an
// arguments adaptor when the static arity differs from the actual argument
// count), and finally the callee's own frame with parameters, context and
// locals filled in.
HEnvironment* HEnvironment::CopyForInlining(
    Handle<JSFunction> target,
    int arguments,
    FunctionLiteral* function,
    HConstant* undefined,
    InliningKind inlining_kind) const {
  DCHECK(frame_type() == JS_FUNCTION);

  // Outer environment is a copy of this one without the arguments.
  int arity = function->scope()->num_parameters();

  HEnvironment* outer = Copy();
  outer->Drop(arguments + 1);  // Including receiver.
  outer->ClearHistory();

  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
    // Create artificial constructor stub environment. The receiver should
    // actually be the constructor function, but we pass the newly allocated
    // object instead, DoComputeConstructStubFrame() relies on that.
    outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
  } else if (inlining_kind == GETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for restoring the
    // correct context.
    outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
  } else if (inlining_kind == SETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for temporarily saving
    // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
    outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
  }

  if (arity != arguments) {
    // Create artificial arguments adaptation environment.
    outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
  }

  HEnvironment* inner =
      new(zone()) HEnvironment(outer, function->scope(), target, zone());
  // Get the argument values from the original environment.
  for (int i = 0; i <= arity; ++i) {  // Include receiver.
    // Missing arguments (actual count below static arity) become undefined.
    HValue* push = (i <= arguments) ?
        ExpressionStackAt(arguments - i) : undefined;
    inner->SetValueAt(i, push);
  }
  // Slot arity + 1 holds the context; remaining locals start out undefined.
  inner->SetValueAt(arity + 1, context());
  for (int i = arity + 2; i < inner->length(); ++i) {
    inner->SetValueAt(i, undefined);
  }

  inner->set_ast_id(BailoutId::FunctionEntry());
  return inner;
}
13243
13244
13245std::ostream& operator<<(std::ostream& os, const HEnvironment& env) {
13246 for (int i = 0; i < env.length(); i++) {
13247 if (i == 0) os << "parameters\n";
13248 if (i == env.parameter_count()) os << "specials\n";
13249 if (i == env.parameter_count() + env.specials_count()) os << "locals\n";
13250 if (i == env.parameter_count() + env.specials_count() + env.local_count()) {
13251 os << "expressions\n";
13252 }
13253 HValue* val = env.values()->at(i);
13254 os << i << ": ";
13255 if (val != NULL) {
13256 os << val;
13257 } else {
13258 os << "NULL";
13259 }
13260 os << "\n";
13261 }
13262 return os << "\n";
13263}
13264
13265
13266void HTracer::TraceCompilation(CompilationInfo* info) {
13267 Tag tag(this, "compilation");
13268 base::SmartArrayPointer<char> name = info->GetDebugName();
13269 if (info->IsOptimizing()) {
13270 PrintStringProperty("name", name.get());
13271 PrintIndent();
13272 trace_.Add("method \"%s:%d\"\n", name.get(), info->optimization_id());
13273 } else {
13274 PrintStringProperty("name", name.get());
13275 PrintStringProperty("method", "stub");
13276 }
13277 PrintLongProperty("date",
13278 static_cast<int64_t>(base::OS::TimeCurrentMillis()));
13279}
13280
13281
13282void HTracer::TraceLithium(const char* name, LChunk* chunk) {
13283 DCHECK(!chunk->isolate()->concurrent_recompilation_enabled());
13284 AllowHandleDereference allow_deref;
13285 AllowDeferredHandleDereference allow_deferred_deref;
13286 Trace(name, chunk->graph(), chunk);
13287}
13288
13289
13290void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
13291 DCHECK(!graph->isolate()->concurrent_recompilation_enabled());
13292 AllowHandleDereference allow_deref;
13293 AllowDeferredHandleDereference allow_deferred_deref;
13294 Trace(name, graph, NULL);
13295}
13296
13297
// Emits the whole control-flow graph of |graph| in the C1Visualizer "cfg"
// format: one "block" section per basic block with its predecessors,
// successors, flags, dominator, phis ("states"/"locals"), hydrogen
// instructions ("HIR") and, when |chunk| is non-NULL, the corresponding
// lithium instructions ("LIR").
void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
  Tag tag(this, "cfg");
  PrintStringProperty("name", name);
  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    HBasicBlock* current = blocks->at(i);
    Tag block_tag(this, "block");
    PrintBlockProperty("name", current->block_id());
    // Bytecode indices do not apply to crankshaft graphs; emit -1.
    PrintIntProperty("from_bci", -1);
    PrintIntProperty("to_bci", -1);

    // Predecessor block list (empty property when there are none).
    if (!current->predecessors()->is_empty()) {
      PrintIndent();
      trace_.Add("predecessors");
      for (int j = 0; j < current->predecessors()->length(); ++j) {
        trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
      }
      trace_.Add("\n");
    } else {
      PrintEmptyProperty("predecessors");
    }

    // Successor block list, taken from the block's end instruction.
    if (current->end()->SuccessorCount() == 0) {
      PrintEmptyProperty("successors");
    } else {
      PrintIndent();
      trace_.Add("successors");
      for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
        trace_.Add(" \"B%d\"", it.Current()->block_id());
      }
      trace_.Add("\n");
    }

    PrintEmptyProperty("xhandlers");

    // Block flags: loop-successor dominator, unreachable, OSR entry.
    {
      PrintIndent();
      trace_.Add("flags");
      if (current->IsLoopSuccessorDominator()) {
        trace_.Add(" \"dom-loop-succ\"");
      }
      if (current->IsUnreachable()) {
        trace_.Add(" \"dead\"");
      }
      if (current->is_osr_entry()) {
        trace_.Add(" \"osr\"");
      }
      trace_.Add("\n");
    }

    if (current->dominator() != NULL) {
      PrintBlockProperty("dominator", current->dominator()->block_id());
    }

    PrintIntProperty("loop_depth", current->LoopNestingDepth());

    // With a lithium chunk available, also report the LIR id range covered
    // by this block.
    if (chunk != NULL) {
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      PrintIntProperty(
          "first_lir_id",
          LifetimePosition::FromInstructionIndex(first_index).Value());
      PrintIntProperty(
          "last_lir_id",
          LifetimePosition::FromInstructionIndex(last_index).Value());
    }

    // Phis are reported in the "states"/"locals" section.
    {
      Tag states_tag(this, "states");
      Tag locals_tag(this, "locals");
      int total = current->phis()->length();
      PrintIntProperty("size", current->phis()->length());
      PrintStringProperty("method", "None");
      for (int j = 0; j < total; ++j) {
        HPhi* phi = current->phis()->at(j);
        PrintIndent();
        std::ostringstream os;
        os << phi->merged_index() << " " << NameOf(phi) << " " << *phi << "\n";
        trace_.Add(os.str().c_str());
      }
    }

    // Hydrogen instructions, one "<use-count> <name> <instruction>" line
    // each, optionally annotated with the source position.
    {
      Tag HIR_tag(this, "HIR");
      for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
        HInstruction* instruction = it.Current();
        int uses = instruction->UseCount();
        PrintIndent();
        std::ostringstream os;
        os << "0 " << uses << " " << NameOf(instruction) << " " << *instruction;
        if (graph->info()->is_tracking_positions() &&
            instruction->has_position() && instruction->position().raw() != 0) {
          const SourcePosition pos = instruction->position();
          os << " pos:";
          // A non-zero inlining id is emitted as "<id>_" before the position.
          if (pos.inlining_id() != 0) os << pos.inlining_id() << "_";
          os << pos.position();
        }
        os << " <|@\n";
        trace_.Add(os.str().c_str());
      }
    }


    // Lithium instructions for the block, tagged with their lifetime
    // positions and the hydrogen values they originate from.
    if (chunk != NULL) {
      Tag LIR_tag(this, "LIR");
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      if (first_index != -1 && last_index != -1) {
        const ZoneList<LInstruction*>* instructions = chunk->instructions();
        for (int i = first_index; i <= last_index; ++i) {
          LInstruction* linstr = instructions->at(i);
          if (linstr != NULL) {
            PrintIndent();
            trace_.Add("%d ",
                       LifetimePosition::FromInstructionIndex(i).Value());
            linstr->PrintTo(&trace_);
            std::ostringstream os;
            os << " [hir:" << NameOf(linstr->hydrogen_value()) << "] <|@\n";
            trace_.Add(os.str().c_str());
          }
        }
      }
    }
  }
}
13423
13424
13425void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
13426 Tag tag(this, "intervals");
13427 PrintStringProperty("name", name);
13428
13429 const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
13430 for (int i = 0; i < fixed_d->length(); ++i) {
13431 TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
13432 }
13433
13434 const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
13435 for (int i = 0; i < fixed->length(); ++i) {
13436 TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
13437 }
13438
13439 const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
13440 for (int i = 0; i < live_ranges->length(); ++i) {
13441 TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
13442 }
13443}
13444
13445
// Emits a single live range as one line of the "intervals" section:
// "<id> <type> [<location>] <parent-id> <hint-id> [start, end[ ... <uses>".
// Empty or NULL ranges produce no output.
void HTracer::TraceLiveRange(LiveRange* range, const char* type,
                             Zone* zone) {
  if (range != NULL && !range->IsEmpty()) {
    PrintIndent();
    trace_.Add("%d %s", range->id(), type);
    // Location: either the assigned register...
    if (range->HasRegisterAssigned()) {
      LOperand* op = range->CreateAssignedOperand(zone);
      int assigned_reg = op->index();
      if (op->IsDoubleRegister()) {
        trace_.Add(" \"%s\"",
                   DoubleRegister::from_code(assigned_reg).ToString());
      } else {
        DCHECK(op->IsRegister());
        trace_.Add(" \"%s\"", Register::from_code(assigned_reg).ToString());
      }
    } else if (range->IsSpilled()) {
      // ...or the spill slot of the range's top-level parent.
      LOperand* op = range->TopLevel()->GetSpillOperand();
      if (op->IsDoubleStackSlot()) {
        trace_.Add(" \"double_stack:%d\"", op->index());
      } else {
        DCHECK(op->IsStackSlot());
        trace_.Add(" \"stack:%d\"", op->index());
      }
    }
    // A top-level range reports itself as its own parent.
    int parent_index = -1;
    if (range->IsChild()) {
      parent_index = range->parent()->id();
    } else {
      parent_index = range->id();
    }
    // Register hint: the virtual register of the first unallocated hint
    // operand, or -1 when there is none.
    LOperand* op = range->FirstHint();
    int hint_index = -1;
    if (op != NULL && op->IsUnallocated()) {
      hint_index = LUnallocated::cast(op)->virtual_register();
    }
    trace_.Add(" %d %d", parent_index, hint_index);
    // Half-open use intervals "[start, end[" covered by this range.
    UseInterval* cur_interval = range->first_interval();
    while (cur_interval != NULL && range->Covers(cur_interval->start())) {
      trace_.Add(" [%d, %d[",
                 cur_interval->start().Value(),
                 cur_interval->end().Value());
      cur_interval = cur_interval->next();
    }

    // Use positions; by default only register-beneficial ones, all of them
    // under --trace-all-uses.
    UsePosition* current_pos = range->first_pos();
    while (current_pos != NULL) {
      if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
        trace_.Add(" %d M", current_pos->pos().Value());
      }
      current_pos = current_pos->next();
    }

    trace_.Add(" \"\"\n");
  }
}
13501
13502
void HTracer::FlushToFile() {
  // Write the accumulated trace buffer out to |filename_|, then reset the
  // buffer so subsequent tracing starts fresh.
  AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
              false);
  trace_.Reset();
}
13508
13509
13510void HStatistics::Initialize(CompilationInfo* info) {
13511 if (!info->has_shared_info()) return;
13512 source_size_ += info->shared_info()->SourceSize();
13513}
13514
13515
// Prints a table of per-phase compilation times and code sizes, followed by
// totals for the three top-level stages and per-kB-of-source averages.
void HStatistics::Print() {
  PrintF(
      "\n"
      "----------------------------------------"
      "----------------------------------------\n"
      "--- Hydrogen timing results:\n"
      "----------------------------------------"
      "----------------------------------------\n");
  // Total time across all recorded phases, used for the percentage column.
  base::TimeDelta sum;
  for (int i = 0; i < times_.length(); ++i) {
    sum += times_[i];
  }

  // One row per phase: time (ms and % of sum) and size (bytes and % of
  // total_size_).
  for (int i = 0; i < names_.length(); ++i) {
    PrintF("%33s", names_[i]);
    double ms = times_[i].InMillisecondsF();
    double percent = times_[i].PercentOf(sum);
    PrintF(" %8.3f ms / %4.1f %% ", ms, percent);

    size_t size = sizes_[i];
    double size_percent = static_cast<double>(size) * 100 / total_size_;
    PrintF(" %9zu bytes / %4.1f %%\n", size, size_percent);
  }

  // Top-level stage breakdown: graph creation, optimization, code generation.
  PrintF(
      "----------------------------------------"
      "----------------------------------------\n");
  base::TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
  PrintF("%33s %8.3f ms / %4.1f %% \n", "Create graph",
         create_graph_.InMillisecondsF(), create_graph_.PercentOf(total));
  PrintF("%33s %8.3f ms / %4.1f %% \n", "Optimize graph",
         optimize_graph_.InMillisecondsF(), optimize_graph_.PercentOf(total));
  PrintF("%33s %8.3f ms / %4.1f %% \n", "Generate and install code",
         generate_code_.InMillisecondsF(), generate_code_.PercentOf(total));
  PrintF(
      "----------------------------------------"
      "----------------------------------------\n");
  PrintF("%33s %8.3f ms %9zu bytes\n", "Total",
         total.InMillisecondsF(), total_size_);
  PrintF("%33s (%.1f times slower than full code gen)\n", "",
         total.TimesOf(full_code_gen_));

  // Normalize time and allocated size by source size (in kB), guarding
  // against division by zero for empty sources.
  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
  double normalized_time = source_size_in_kb > 0
      ? total.InMillisecondsF() / source_size_in_kb
      : 0;
  double normalized_size_in_kb =
      source_size_in_kb > 0
          ? static_cast<double>(total_size_) / 1024 / source_size_in_kb
          : 0;
  PrintF("%33s %8.3f ms %7.3f kB allocated\n",
         "Average per kB source", normalized_time, normalized_size_in_kb);
}
13569
13570
13571void HStatistics::SaveTiming(const char* name, base::TimeDelta time,
13572 size_t size) {
13573 total_size_ += size;
13574 for (int i = 0; i < names_.length(); ++i) {
13575 if (strcmp(names_[i], name) == 0) {
13576 times_[i] += time;
13577 sizes_[i] += size;
13578 return;
13579 }
13580 }
13581 names_.Add(name);
13582 times_.Add(time);
13583 sizes_.Add(size);
13584}
13585
13586
HPhase::~HPhase() {
  // Emit the hydrogen graph for this phase when tracing is enabled.
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
  }

#ifdef DEBUG
  graph_->Verify(false);  // No full verify.
#endif
}
13596
13597} // namespace internal
13598} // namespace v8